+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 \
    -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY \
    --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 \
    --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.RATOsPId8A \
    --bazel-remote-put --dist-cache-max-file-size=209715200 \
    -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 \
    --build-all --cache-size 2TB --force-build-depends \
    --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt \
    --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl \
    --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml \
    --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json \
    --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out

Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[2 ymakes processing] [7694/7694 modules configured] [1981/4128 modules rendered]
[2 ymakes processing] [7694/7694 modules configured] [4083/4128 modules rendered]
[2 ymakes processing] [7694/7694 modules configured] [4128/4128 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7700/7700 modules configured] [4128/4128 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution

|33.3%| CLEANING SYMRES
| 1.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut
| 2.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a
| 2.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a
| 3.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
| 4.8%| PREPARE $(VCS)
|18.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool
|25.5%| PREPARE $(FLAKE8_PY2-2255386470)
|33.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql
|34.8%| PREPARE $(CLANG_FORMAT-1286082657)
|36.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb
|41.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun
|41.6%| PREPARE $(PYTHON)
|42.2%| PREPARE $(FLAKE8_PY3-715603131)
|45.0%| PREPARE $(LLD_ROOT-3808007503)
|45.4%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620)
|50.8%| PREPARE $(YMAKE_PYTHON3-4256832079)
|51.1%| PREPARE $(FLAKE8_LINTER-sbr:6561765464)
|51.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun
[... remaining progress output: several hundred successful [AR]/[CC]/[LD] {BAZEL_DOWNLOAD} entries and a few local [CP]/[CF] entries for library, compile, link and copy targets ...]
{BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/libpy3solomon_recipe.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |51.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |51.4%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/flake8_linter/flake8_linter |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_data_erasure_manager.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |51.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |51.8%| PREPARE $(BLACK_LINTER-sbr:8415400280) |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/batch/libkqp-common-batch.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |51.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |52.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |52.0%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/black_linter/black_linter |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |52.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/libpy3ydb_supp.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |52.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |52.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/cms/console/util/libcms-console-util.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |52.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |52.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/libpy3statistics_workload.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |52.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |52.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |51.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |51.5%| PREPARE $(CLANG-1922233694) |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |51.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |50.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |50.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |49.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |49.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |48.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |49.0%| PREPARE $(WITH_JDK-sbr:7832760150) |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |48.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |48.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |45.6%| PREPARE $(JDK_DEFAULT-472926544) |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a 
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |44.8%| PREPARE $(GDB) |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |44.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |44.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |45.0%| PREPARE $(WITH_JDK17-sbr:7832760150) |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |45.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |45.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |45.2%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/mdb_mock/libpy3recipe.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |45.3%| PREPARE $(JDK17-472926544) |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |45.4%| RESOURCE $(sbr:4966407557) |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |45.4%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |45.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |45.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |45.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |45.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |45.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |45.6%| RESOURCE $(sbr:770480022) |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |45.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |45.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |45.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |45.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |45.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |45.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |45.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |45.9%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |46.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |46.1%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |46.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |46.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |46.4%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |46.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |46.5%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |46.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |46.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |46.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |46.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/graph/protos/libcore-graph-protos.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |46.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |47.1%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... 
library/recipes/docker_compose/bin/docker-compose} |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |47.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/libpy3simple_queue.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |47.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |47.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |47.8%| PREPARE $(CLANG16-1380963495) |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |47.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |47.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/interface/libfmr-yt_service-interface.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |48.0%| PREPARE $(CLANG18-1866954364) |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |48.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |48.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |48.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |48.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |48.4%| PREPARE $(CLANG-874354456) |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |48.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |48.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |48.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/lib/actors/libservices-lib-actors.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |48.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |48.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/auth/libydb-services-auth.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |48.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |48.7%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |48.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/testing/common/libcpp-testing-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |49.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |49.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a >> __main__.py::flake8 [GOOD] >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a >> __main__.py::flake8 [GOOD] |49.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |49.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering >> __main__.py::flake8 [GOOD] |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |49.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a >> test_schemeshard_limits.py::flake8 [GOOD] |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |49.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a >> test.py::flake8 [GOOD] |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a >> test.py::py2_flake8 [GOOD] |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |49.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a >> test_stability.py::flake8 [GOOD] >> ydb_supp::import_test [GOOD] |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |49.2%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a >> test.py::py2_flake8 [GOOD] |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a >> tablet_scheme_tests.py::flake8 [GOOD] |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a >> conftest.py::flake8 [GOOD] |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a >> test_serverless.py::flake8 [GOOD] |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a >> run_tests.py::flake8 [GOOD] >> test_transform.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |49.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a >> integrations_test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |49.4%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a >> test.py::flake8 [GOOD] |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a >> test_log_scenario.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |49.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/template/ut/flake8 >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_generator.py::flake8 [GOOD] >> test_init.py::flake8 [GOOD] >> ydb-tests-fq-streaming_optimize::import_test [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |49.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/impl/libfmr-yt_service-impl.a |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a >> __main__.py::flake8 [GOOD] |49.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/supp/import_test >> ydb_supp::import_test [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |49.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |49.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |49.8%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |49.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a >> test.py::py2_flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/mdb_mock/flake8 >> __main__.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> 
test.py::py2_flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |50.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |50.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/libfmr-table_data_service-local.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |50.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a >> test_sql_streaming.py::flake8 [GOOD] >> __main__.py::black [GOOD] >> test_dump_restore.py::flake8 [GOOD] |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_service/file/libfmr-yt_service-file.a |50.1%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/flake8 >> test_init.py::flake8 [GOOD] |50.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/import_test >> ydb-tests-fq-streaming_optimize::import_test [GOOD] |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |50.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/flake8 >> __main__.py::flake8 [GOOD] |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a >> conftest.py::flake8 [GOOD] |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_04f56802b68450abc8421282d0.o >> __main__.py::flake8 [GOOD] |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/libpy3solomon_recipe_grpc.global.a >> test_async_replication.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |50.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.global.a >> test.py::flake8 [GOOD] |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/oltp_workload/libpy3oltp_workload.global.a |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> ydb_recipe::import_test [GOOD] >> test_postgres.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> ydb-core-viewer-tests::import_test [GOOD] >> test.py::py2_flake8 [GOOD] >> ydb-tests-functional-api::import_test [GOOD] >> ydb-tests-functional-sqs-common::import_test [GOOD] >> ydb-tests-functional-config::import_test [GOOD] |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp >> test_leader_start_inflight.py::flake8 [GOOD] |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] >> ydb-tests-functional-limits::import_test [GOOD] >> test_s3.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_copy_table.py::flake8 [GOOD] |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/mdb_mock/black >> __main__.py::black [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb-tests-functional-sqs-messaging::import_test [GOOD] >> test.py::flake8 [GOOD] |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp >> column_table_helper.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> ydb-tests-functional-benchmarks_init::import_test [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |50.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] >> test.py::py2_flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp >> 
ydb-tests-functional-sqs-with_quotas::import_test [GOOD] >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a >> ydb-tests-functional-serverless::import_test [GOOD] |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] >> ydb-tests-functional-sqs-large::import_test [GOOD] |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] >> test_mixed.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.so >> select_datetime.py::flake8 [GOOD] |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator_grpc/flake8 >> __main__.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp >> test.py::flake8 [GOOD] |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_serializable.py::flake8 [GOOD] |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp >> test.py::flake8 [GOOD] |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |50.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/import_test >> ydb_recipe::import_test [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/import_test >> ydb-core-viewer-tests::import_test [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> 
ydb-tests-functional-sqs-common::import_test [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |50.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |50.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/import_test >> ydb-tests-functional-benchmarks_init::import_test [GOOD] >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] >> ydb-tests-functional-audit::import_test [GOOD] |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/libpy3s3_recipe.global.a |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/flake8 >> 
test_tpch_spilling.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> ydb-tests-datashard-dump_restore::import_test [GOOD] >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/flake8 >> test_mixed.py::flake8 [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/token_accessor_mock/libpy3recipe.global.a >> test_split_merge.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> 
test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_dml.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |51.0%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/report/ut/flake8 >> test.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a >> test.py::py2_flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-datashard-dump_restore::import_test [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] >> test_http_api.py::flake8 [GOOD] |51.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/flake8 >> __main__.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp >> compare.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/flake8 >> __main__.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_disposition.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] |51.2%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |51.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> __main__.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/s3_recipe/flake8 >> __main__.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] >> ydb-tests-datashard-split_merge::import_test [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |51.3%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |51.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator/recipe/flake8 >> __main__.py::flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/supp/flake8 >> __main__.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/runner |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |51.5%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |51.5%| [TS] {RESULT} ydb/tests/stress/simple_queue/flake8 |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |51.6%| [TS] {RESULT} ydb/tests/olap/flake8 |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/token_accessor_mock/flake8 >> __main__.py::flake8 [GOOD] |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/import_test >> ydb-tests-datashard-split_merge::import_test [GOOD] |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |51.6%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |51.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test_multinode_cluster.py::flake8 [GOOD] 
>> test_base.py::flake8 [GOOD] |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |51.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> conftest.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> solomon_recipe::import_test [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_distconf.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a >> test_greenplum.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] >> test_mysql.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |50.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> runner::import_test [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator/recipe/import_test >> solomon_recipe::import_test [GOOD] |50.9%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |50.9%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |50.9%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |50.9%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |50.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |50.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |50.9%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] >> tpc_tests.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |51.1%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/import_test >> runner::import_test [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/flake8 >> __main__.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] |51.2%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] >> test_break.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |51.3%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |51.3%| [TS] {RESULT} ydb/tests/functional/api/flake8 >> test.py::py2_flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so >> test_tpcds.py::flake8 [GOOD] >> 
test_tpch.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |51.3%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 >> test_encryption.py::flake8 [GOOD] |51.3%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8 |51.3%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |51.3%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 >> ydb-tests-datashard-async_replication::import_test [GOOD] |51.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |51.3%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |51.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |51.4%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |51.4%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 >> gen-report.py::flake8 [GOOD] >> ydb-tests-fq-yds::import_test [GOOD] >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |51.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] >> ydb-tests-stress-log-tests::import_test [GOOD] |51.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_7406de026bf25e30e96a88517d.o |51.4%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |51.4%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator_grpc/flake8 |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] >> ydb-tests-example::import_test [GOOD] >> ydb-tests-functional-tpc-large::import_test [GOOD] >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |51.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/run_tests |51.6%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/import_test >> ydb-tests-datashard-async_replication::import_test [GOOD] |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |51.6%| [TS] {RESULT} ydb/tests/tools/mdb_mock/black |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |51.6%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/olap_workload |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |51.6%| [TS] {RESULT} ydb/tests/functional/serverless/import_test |51.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |51.7%| [TS] {RESULT} ydb/tests/supp/import_test |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |51.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |51.7%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |51.7%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/import_test >> ydb-tests-example::import_test [GOOD] |51.7%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/import_test >> ydb-tests-functional-tpc-large::import_test [GOOD] |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |51.7%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |51.8%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |51.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |51.8%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 >> ydb-tests-functional-ydb_cli::import_test [GOOD] |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.global.a |51.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp >> local_ydb::import_test [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |51.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> olap_workload::import_test [GOOD] 
|51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |51.8%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/report/ut/ydb-library-benchmarks-report-ut |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/token_accessor_mock/recipe |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |51.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |51.8%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/import_test |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |51.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/import_test >> local_ydb::import_test [GOOD] |51.9%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/import_test >> olap_workload::import_test [GOOD] >> run_tests::import_test [GOOD] |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |51.9%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli >> test.py::Test::test_add_calculated [GOOD] |51.9%| [TS] {RESULT} ydb/tests/stress/statistics_workload/flake8 |51.9%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test >> test.py::Test::test_add_duplicate [GOOD] >> test.py::Test::test_add_from_file [GOOD] >> test.py::Test::test_add_from_lines [GOOD] >> test.py::Test::test_add_one [GOOD] >> test.py::Test::test_add_one_error [GOOD] >> test.py::Test::test_add_one_with_empty [GOOD] >> test.py::Test::test_add_shame_rate [GOOD] >> test.py::Test::test_add_similar_errors [GOOD] >> test.py::Test::test_add_special [GOOD] >> test.py::Test::test_add_two_error [GOOD] >> test.py::Test::test_build [GOOD] >> test.py::Test::test_build_md [GOOD] >> test.py::Test::test_build_shame_md [GOOD] >> test.py::Test::test_build_shame_sum_md [GOOD] >> test.py::Test::test_build_sum_md [GOOD] >> test.py::Test::test_build_sums [GOOD] >> test.py::Test::test_build_sums_shame [GOOD] >> test.py::Test::test_create [GOOD] >> test.py::Test::test_display [GOOD] >> test.py::Test::test_immutable_special [GOOD] |51.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |51.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |51.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_6403bfa5c5e35b29a21c73fb0e.o |51.9%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |51.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/import_test >> run_tests::import_test [GOOD] |51.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |51.5%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/report/ut/py3test >> test.py::Test::test_immutable_special [GOOD] Test command err: ydb/library/benchmarks/report/__init__.py:187: DeprecationWarning: the 'MARKDOWN' constant is deprecated, use the 'TableStyle' enum instead self.t.set_style(prettytable.MARKDOWN) |51.1%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> ydb-tests-functional-clickbench::import_test [GOOD] |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |51.0%| COMPACTING CACHE 16.8MiB |51.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> ydb-library-benchmarks-report-ut::import_test [GOOD] |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |51.0%| [TS] {RESULT} ydb/tests/olap/load/flake8 |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.so |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/report/ut/import_test >> ydb-library-benchmarks-report-ut::import_test [GOOD] |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 >> recipe::import_test [GOOD] |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompiler/rescompiler |51.0%| [TS] {RESULT} ydb/library/benchmarks/template/ut/flake8 |51.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/token_accessor_mock/import_test >> recipe::import_test [GOOD] |51.0%| [TS] {RESULT} ydb/tests/functional/tpc/large/import_test |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/template/ut/ydb-library-benchmarks-template-ut |51.1%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |51.1%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test >> solomon_recipe_grpc::import_test [GOOD] |51.1%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |51.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tools/solomon_emulator_grpc/import_test >> solomon_recipe_grpc::import_test [GOOD] |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |51.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |51.1%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/minidumps/flake8 |51.1%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
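[editor note, not part of the captured log] The py3test entry above for ydb/library/benchmarks/report/ut surfaces the only actionable diagnostic in this stretch of the log: a DeprecationWarning from prettytable at ydb/library/benchmarks/report/__init__.py:187, telling the caller to switch from the module-level MARKDOWN constant to the TableStyle enum. A minimal sketch of that migration, under the assumption of a prettytable release (>= 3.12) that exports TableStyle with a MARKDOWN member as the warning text implies, is:

    # Hedged sketch of the fix suggested by the DeprecationWarning above.
    # Assumes prettytable >= 3.12, where module-level style constants are
    # deprecated in favour of the TableStyle enum; verify the member name
    # against the version pinned in the repository.
    import prettytable

    t = prettytable.PrettyTable(["query", "time_ms"])
    t.add_row(["q1", 42])

    # Old form, which emits the warning seen in the log:
    #   t.set_style(prettytable.MARKDOWN)
    # Suggested replacement:
    t.set_style(prettytable.TableStyle.MARKDOWN)

    print(t)

The test itself still passes ([GOOD]), so this is cleanup rather than a failure fix; silencing the warning only requires changing the single set_style call flagged in the traceback.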
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} >> test.py::Test::test_add [GOOD] >> test.py::Test::test_add_vars [GOOD] >> test.py::Test::test_create [GOOD] >> test.py::Test::test_expose_var_from_include [GOOD] >> test.py::Test::test_expose_var_from_var [GOOD] >> test.py::Test::test_include [GOOD] >> test.py::Test::test_include_from_resource [GOOD] >> test.py::Test::test_linked_include [GOOD] >> test.py::Test::test_result_formatter [GOOD] >> test.py::Test::test_result_formatter_dates [GOOD] >> test.py::Test::test_result_formatter_optional [GOOD] >> test.py::Test::test_result_formatter_zeros [GOOD] |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/template/ut/py3test >> test.py::Test::test_result_formatter_zeros [GOOD] |51.2%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |51.2%| [TS] {RESULT} ydb/tests/stress/olap_workload/import_test |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |51.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/test/ydb-library-yql-udfs-common-roaring-test |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |51.2%| [TS] {RESULT} ydb/library/benchmarks/report/ut/flake8 >> ydb-library-benchmarks-template-ut::import_test [GOOD] |51.2%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator/recipe/import_test |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |51.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/template/ut/import_test >> ydb-library-benchmarks-template-ut::import_test [GOOD] |51.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |51.3%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8 |51.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |51.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |51.3%| [TS] {RESULT} ydb/public/tools/ydb_recipe/import_test |51.3%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |51.3%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |51.3%| [TS] {RESULT} ydb/tests/tools/token_accessor_mock/import_test |51.3%| [TS] {RESULT} ydb/library/benchmarks/report/ut/py3test |51.4%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test |51.4%| [TS] {RESULT} ydb/library/benchmarks/report/ut/import_test |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/udf_resolver/udf_resolver |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |51.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import >> ydb-library-yql-udfs-common-roaring-test::import_test 
[GOOD] |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |51.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/udfs/common/roaring/test/import_test >> ydb-library-yql-udfs-common-roaring-test::import_test [GOOD] |51.4%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator_grpc/import_test >> s3_recipe::import_test [GOOD] |51.4%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |51.4%| [TS] {RESULT} ydb/tests/sql/large/flake8 |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |51.5%| [TS] {RESULT} ydb/tests/tools/mdb_mock/flake8 |51.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |51.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/s3_recipe/import_test >> s3_recipe::import_test [GOOD] |51.5%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |51.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |51.5%| [TS] {RESULT} ydb/library/benchmarks/template/ut/py3test |51.5%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8 |51.5%| [TS] {RESULT} ydb/library/benchmarks/template/ut/import_test |51.5%| [TS] {RESULT} ydb/tests/olap/common/flake8 |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/result_convert |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |51.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |51.6%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |51.6%| [TS] {RESULT} ydb/tests/fq/common/flake8 |51.6%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test |51.6%| [TS] {RESULT} ydb/tests/tools/token_accessor_mock/flake8 >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |51.6%| [TS] {RESULT} ydb/library/yql/udfs/common/roaring/test/import_test |51.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |51.6%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/import_test >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |51.7%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test >> result_convert::import_test [GOOD] |51.7%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |51.7%| [TS] {RESULT} ydb/tests/tools/s3_recipe/import_test |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/result_compare |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |51.7%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |51.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/import_test >> result_convert::import_test [GOOD] |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/test/library-yql-udfs-common-clickhouse-client-test |51.7%| [TS] {RESULT} ydb/tests/stability/tool/flake8 |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |51.7%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |51.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |51.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |51.8%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 >> 
result_compare::import_test [GOOD] |51.8%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |51.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/import_test >> result_compare::import_test [GOOD] |51.8%| [TS] {RESULT} ydb/tests/functional/limits/import_test |51.8%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |51.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |51.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/import_test |51.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |51.9%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test |51.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/simple_queue |51.9%| [TS] {RESULT} ydb/tests/functional/api/import_test |51.9%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/import_test |51.9%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |51.9%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |51.9%| [TS] {RESULT} ydb/tests/functional/config/import_test >> library-yql-udfs-common-clickhouse-client-test::import_test [GOOD] |51.9%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |51.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/udfs/common/clickhouse/client/test/import_test >> library-yql-udfs-common-clickhouse-client-test::import_test [GOOD] |51.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |52.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |52.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |52.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/import_test |52.0%| [TS] {RESULT} ydb/tests/library/ut/flake8 |52.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/mdb_mock/recipe |52.0%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |52.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |52.0%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |52.0%| [TS] {RESULT} ydb/tests/example/flake8 |52.0%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |52.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |52.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |52.1%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |52.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/test/ydb-library-yql-udfs-common-knn-test |52.1%| [TS] {RESULT} ydb/tests/functional/config/flake8 |52.1%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |52.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> ydb-tests-olap-load::import_test [GOOD] >> simple_queue::import_test [GOOD] |52.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |52.1%| [TS] {RESULT} ydb/core/viewer/tests/import_test |52.1%| [TS] {RESULT} ydb/tests/supp/flake8 |52.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/import_test >> ydb-tests-olap-load::import_test [GOOD] |52.1%| [TS] {RESULT} ydb/library/yql/udfs/common/clickhouse/client/test/import_test |52.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |52.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |52.2%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/stress/simple_queue/import_test >> simple_queue::import_test [GOOD] |52.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner |52.2%| [TS] {RESULT} ydb/tests/stress/olap_workload/flake8 |52.2%| [TS] {RESULT} ydb/tests/datashard/async_replication/import_test |52.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |52.2%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |52.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |52.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |52.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> recipe::import_test [GOOD] |52.2%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test |52.2%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |52.2%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |52.3%| [TS] {RESULT} ydb/tests/fq/yds/import_test |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/ydb-tests-stress-mixedpy |52.3%| [TS] {RESULT} ydb/tests/datashard/split_merge/import_test |52.3%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |52.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/mdb_mock/import_test >> recipe::import_test [GOOD] |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |52.3%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |52.3%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |52.3%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/simple_queue |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |52.4%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |52.4%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/olap_workload |52.4%| [TS] {RESULT} ydb/tests/olap/load/import_test |52.4%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |52.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |52.4%| [TS] {RESULT} ydb/tests/stress/simple_queue/import_test >> ydb-library-yql-udfs-common-knn-test::import_test [GOOD] |52.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |52.4%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |52.4%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 |52.4%| [TS] {RESULT} ydb/tests/example/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] |52.4%| [TS] {RESULT} ydb/tests/datashard/s3/flake8 |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/udfs/common/knn/test/import_test >> ydb-library-yql-udfs-common-knn-test::import_test [GOOD] |52.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |52.5%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |52.5%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |52.5%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |52.5%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |52.6%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/import_test |52.6%| [TS] {RESULT} ydb/tests/stress/mixedpy/flake8 |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |52.6%| [TS] {RESULT} ydb/tests/tools/mdb_mock/import_test |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |52.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |52.6%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/import_test |52.6%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |52.6%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/import_test |52.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |52.6%| [TS] {RESULT} ydb/public/tools/local_ydb/import_test |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |52.6%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |52.7%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/ydb_configure |52.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |52.7%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |52.7%| [TS] {RESULT} ydb/library/yql/udfs/common/knn/test/import_test |52.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test |52.7%| [TS] {RESULT} ydb/library/benchmarks/runner/import_test |52.7%| [TS] {RESULT} ydb/tests/sql/flake8 |52.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |52.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |52.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |52.8%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |52.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |52.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> ydbd_slice::import_test [GOOD] |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |52.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |52.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/ydbd_slice/bin/import_test >> ydbd_slice::import_test [GOOD] |52.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |52.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |52.9%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |52.9%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |52.9%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb >> generator::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |52.9%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |52.9%| [TS] {RESULT} ydb/tests/functional/audit/import_test |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |53.0%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8 |53.0%| [TS] {RESULT} ydb/tests/stress/oltp_workload/flake8 |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/import_test >> generator::import_test [GOOD] |53.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/nemesis |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |53.0%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |53.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |53.0%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] |53.1%| [TS] {RESULT} ydb/tests/tools/s3_recipe/flake8 |53.1%| [TS] {RESULT} ydb/library/yql/tools/solomon_emulator/recipe/flake8 |53.1%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |53.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |53.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |53.1%| [TS] {RESULT} ydb/tools/ydbd_slice/bin/import_test |53.1%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/flake8 |53.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |53.2%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |53.4%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/cms/flake8 >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |54.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |54.0%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |54.1%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] |54.2%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test |54.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |54.2%| [TS] {RESULT} ydb/tests/olap/docs/generator/import_test |54.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |54.4%| [TS] {RESULT} 
ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> statistics_workload::import_test [GOOD] |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |55.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/ydb-tests-sql >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/import_test >> statistics_workload::import_test [GOOD] |55.5%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/cfg |55.8%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test >> ydb-tests-stress-mixedpy::import_test [GOOD] |56.1%| [TS] {RESULT} ydb/tests/stress/statistics_workload/import_test >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] |56.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/import_test >> ydb-tests-stress-mixedpy::import_test [GOOD] |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/kpz1/00029e/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/kpz1/00029e/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) |57.1%| [TS] {RESULT} ydb/tests/stress/mixedpy/import_test |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |57.9%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/replay >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> ydb-tests-functional-rename::import_test [GOOD] >> ydb-tests-functional-autoconfig::import_test [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> ydb-tests-functional-canonical::import_test [GOOD] >> ydb-tests-fq-mem_alloc::import_test [GOOD] |59.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD] >> ydb-tests-tools-pq_read-test::import_test [GOOD] >> ydb_configure::import_test [GOOD] |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |59.4%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/statistics_workload |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] >> ydb-tests-fq-common::import_test [GOOD] |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |59.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] |59.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/import_test >> ydb_configure::import_test [GOOD] >> ydb-tests-stress-kv-tests::import_test [GOOD] |59.7%| [TS] {RESULT} ydb/tests/functional/rename/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD] |60.0%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test |60.0%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/nemesis |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |60.0%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] |60.2%| [TS] {RESULT} ydb/tests/functional/canonical/import_test |60.3%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test |60.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] |60.4%| [TS] {RESULT} ydb/tools/cfg/bin/import_test |60.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] |60.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |60.6%| [TS] {RESULT} ydb/tests/fq/common/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |60.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |61.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |61.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |61.1%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test |61.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |61.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ydb-tests-olap |61.4%| [TS] {RESULT} 
ydb/tests/stress/kv/tests/import_test |61.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] >> ydb-tests-functional-wardens::import_test [GOOD] |62.2%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |62.9%| [TS] {RESULT} ydb/tests/functional/wardens/import_test |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql >> ydb-tests-fq-multi_plane::import_test [GOOD] |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |64.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD] |64.3%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-functional-serializable::import_test [GOOD] >> ydb-tests-functional-tpc-medium::import_test [GOOD] |64.8%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |66.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> ydb_serializable::import_test [GOOD] |66.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |66.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD] |66.9%| [TS] {RESULT} ydb/tests/functional/serializable/import_test |67.1%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] >> nemesis::import_test [GOOD] |67.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |67.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/import_test >> ydb_serializable::import_test [GOOD] |67.8%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/import_test |68.0%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/import_test |68.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] 2025-05-05 
09:40:19,334 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] >> ydb-tests-stability-ydb::import_test [GOOD] |68.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/import_test >> nemesis::import_test [GOOD] |68.7%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |69.1%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/import_test |69.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/import_test >> ydb-tests-stability-ydb::import_test [GOOD] |69.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] >> ydb-tests-olap-data_quotas::import_test [GOOD] |70.0%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test |70.0%| [TS] {RESULT} ydb/tests/stability/ydb/import_test |70.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |70.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] |70.7%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test |71.0%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |72.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |72.9%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test |73.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |73.5%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-functional-restarts::import_test [GOOD] >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |75.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/import_test >> ydb-tests-functional-restarts::import_test [GOOD] |76.4%| [TS] {RESULT} ydb/tests/functional/restarts/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] |76.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |77.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test |78.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] |78.7%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tools/sql2yql/sql2yql |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |79.2%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |79.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |79.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |79.3%| [AR] {RESULT} 
$(B)/yt/yt/client/libyt-yt-client.a >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |79.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |79.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/import_test >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |79.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] >> ydb-tests-olap-s3_import::import_test [GOOD] >> replay::import_test [GOOD] |79.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/import_test >> ydb-tests-olap-s3_import::import_test [GOOD] |79.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/import_test >> replay::import_test [GOOD] |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |79.3%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/import_test |79.4%| [TS] {RESULT} ydb/tests/olap/s3_import/import_test |79.3%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |79.4%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/ydb_cli |79.4%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli |79.4%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] >> ydb-tests-fq-plans::import_test [GOOD] >> ydb-tests-fq-http_api::import_test [GOOD] |79.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] >> ydb-tests-functional-cms::import_test [GOOD] >> ydb-tests-functional-script_execution::import_test [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row >> test_init.py::TestTpcdsInit::test_s1_column >> test_init.py::TestTpchInit::test_s1_column >> ydb-tests-functional-ttl::import_test [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_parts >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpchInit::test_s1_column [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_state |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> ydb-tests-functional-script_execution::import_test [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD] >> 
ydb-tests-datashard-dml::import_test [GOOD] >> test_generator.py::TestTpchGenerator::test_s1 >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts >> ydb-tests-olap::import_test [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1 >> ydb-tests-functional-large_serializable::import_test [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/import_test >> ydb-tests-datashard-dml::import_test [GOOD] >> kqprun_recipe::import_test [GOOD] >> ydb-tests-library-ut::import_test [GOOD] >> ydb-tests-functional-hive::import_test [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> ydb-tests-functional-tenants::import_test [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_parts >> test_init.py::TestTpcdsInit::test_s1_s3 |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> ydb-tests-olap-column_family-compression::import_test [GOOD] >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s100_column [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/import_test >> ydb-tests-olap-column_family-compression::import_test [GOOD] >> ydb-tests-functional-suite_tests::import_test [GOOD] >> ydb-tests-olap-oom::import_test [GOOD] >> ydb-tests-functional-query_cache::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/import_test >> ydb-tests-olap-oom::import_test [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s100_column [GOOD] >> ydb-tests-functional-postgresql::import_test [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |79.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |79.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] >> oltp_workload::import_test [GOOD] >> ydb-tests-fq-restarts::import_test [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts >> test_init.py::TestClickbenchInit::test_s1_s3 >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |79.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/import_test >> oltp_workload::import_test [GOOD] |79.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s100_column >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> ydb-tests-sql::import_test [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test |79.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> ydb-tests-datashard-copy_table::import_test [GOOD] ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined [... the same "version script assignment of 'global' to symbol '<name>' failed: symbol not defined" warning is repeated here for the remaining TSan annotation hooks and for a long run of intercepted libc, libm, and pthread symbols (among them 'abort', 'clock_gettime', 'fopen', 'getaddrinfo', 'memcpy', 'mmap', 'open', 'pthread_create', 'pthread_mutex_lock', 'pwrite64') ...] ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning:
version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'siglongjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent_r' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mallinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'malloc_stats' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mallopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbrtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memccpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'mempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'openpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'sem_wait' failed: symbol not defined [... the same ld.lld warning, "version script assignment of 'global' to symbol '<name>' failed: symbol not defined", is emitted once for each of the following symbols: send, sendmsg, sendto, setenv, setgrent, setitimer, setlocale, setpwent, shmat, shmctl, sigaction, sigemptyset, sigfillset, signal, sigpending, sigprocmask, sigtimedwait, sigwait, sigwaitinfo, sincos, sincosf, sincosl, snprintf, socketpair, sprintf, sscanf, statfs, statfs64, statvfs, statvfs64, stpcpy, strcasecmp, strcasestr, strcat, strchr, strchrnul, strcmp, strcpy, strcspn, strdup, __strdup, strerror, strerror_r, strftime, __strftime_l, strftime_l, strlen, strncasecmp, strncat, strncmp, strncpy, strndup, __strndup, strnlen, strpbrk, strptime, strrchr, strspn, strstr, strtod, __strtod_internal, __strtod_l, strtod_l, strtof, __strtof_internal, __strtof_l, strtof_l, strtoimax, strtol, strtold, __strtold_internal, __strtold_l, strtold_l, __strtol_internal, strtoll, __strtol_l, strtol_l, __strtoll_internal, __strtoll_l, strtoll_l, strtoul, __strtoul_internal, strtoull, __strtoul_l, strtoul_l, __strtoull_internal, __strtoull_l, strtoull_l, strtoumax, strxfrm, strxfrm_l, swprintf, sysinfo, tcgetattr, tempnam, textdomain, time, timerfd_gettime, timerfd_settime, times, __tls_get_addr, tmpnam, tmpnam_r, tsearch, tzset, __uflow, uname, __underflow, vasprintf, vfprintf, vfscanf, vprintf, vscanf, vsnprintf, vsprintf, vsscanf, vswprintf, wait, wait3, wait4, waitid, waitpid, wcrtomb, wcschr, wcscmp, wcscpy, wcsftime, __wcsftime_l, wcsftime_l, wcslen, wcsnrtombs, wcsrtombs, wcstod, __wcstod_internal, __wcstod_l, wcstod_l, wcstof, __wcstof_internal, __wcstof_l, wcstof_l, wcstol, wcstold, __wcstold_internal, __wcstold_l, wcstold_l, __wcstol_internal, wcstoll, __wcstol_l, wcstol_l, __wcstoll_internal, __wcstoll_l, wcstoll_l, wcstombs, wcstoul, __wcstoul_internal, wcstoull, __wcstoul_l, wcstoul_l, __wcstoull_internal, __wcstoull_l, wcstoull_l, wmemcpy, wmemmove, wmempcpy, wmemset, wordexp, __woverflow, write, writev, __wuflow, __wunderflow, xdr_bool, xdr_bytes, xdr_char, xdr_double, xdr_enum, xdr_float, xdr_hyper, xdr_int, xdr_int16_t, xdr_int32_t, xdr_int64_t, xdr_int8_t, xdr_long, xdr_longlong_t, xdrmem_create, xdr_quad_t, xdr_short, xdrstdio_create, xdr_string, xdr_u_char, xdr_u_hyper, xdr_u_int, xdr_uint16_t, xdr_uint32_t, xdr_uint64_t, xdr_uint8_t, xdr_u_long, xdr_u_longlong_t, xdr_u_quad_t, xdr_u_short, __xpg_strerror_r, __xstat ...] ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not
defined |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |79.8%| [TS] {RESULT} ydb/tests/fq/http_api/import_test |79.8%| [TS] {RESULT} ydb/tests/fq/plans/import_test |79.8%| [TS] {RESULT} ydb/tests/functional/cms/import_test |79.8%| [TS] {RESULT} ydb/tests/functional/script_execution/import_test |79.8%| [TS] {RESULT} ydb/tests/fq/restarts/import_test |79.8%| [TS] {RESULT} ydb/tests/stress/oltp_workload/import_test |79.8%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |79.8%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/import_test |79.8%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test |79.9%| [TS] {RESULT} ydb/tests/olap/column_family/compression/import_test |79.9%| [TS] {RESULT} ydb/tests/olap/oom/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/tenants/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/compatibility/import_test |79.9%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/hive/import_test |79.9%| [TS] {RESULT} ydb/tests/functional/large_serializable/import_test |80.0%| [TS] {RESULT} ydb/tests/library/ut/import_test |80.0%| [TS] {RESULT} ydb/tests/functional/ttl/import_test |80.0%| [TS] {RESULT} ydb/tests/olap/import_test |80.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/import_test >> ydb-tests-datashard-copy_table::import_test [GOOD] |80.0%| [TS] {RESULT} ydb/tests/sql/import_test |80.0%| [TS] {RESULT} ydb/tests/datashard/dml/import_test |80.0%| [TS] {RESULT} ydb/tests/datashard/copy_table/import_test >> test.py::test[ParseFromYdb] >> test.py::test[BitSerialization] >> test.py::test[aggregation] >> test.py::test[BitSerialization] [GOOD] >> test.py::test[CosineDistance] >> test.py::test[ParseFromYdb] [GOOD] >> test.py::test[SerializeCSVWithNames] >> ydb-tests-sql-large::import_test [GOOD] |80.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD] >> test.py::test[aggregation] [GOOD] >> test.py::test[cardinality] >> ydb-tests-fq-s3::import_test [GOOD] |80.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] |80.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.0%| [TS] {RESULT} ydb/tests/sql/large/import_test |80.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.1%| [TS] {RESULT} ydb/tests/fq/s3/import_test >> test.py::test[CosineDistance] [GOOD] >> test.py::test[CosineSimilarity] >> test.py::test[cardinality] [GOOD] >> test.py::test[intersect] >> test.py::test[SerializeCSVWithNames] [GOOD] >> test.py::test[SerializeJSONEachRow] >> ydb-tests-olap-scenario::import_test [GOOD] |80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |80.1%| [TS] {RESULT} ydb/tests/olap/scenario/import_test |80.1%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |80.1%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |80.1%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |80.2%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> test.py::test[CosineSimilarity] [GOOD] >> test.py::test[ErrorDistanceInvalidFormat] >> test.py::test[SerializeJSONEachRow] [GOOD] >> test.py::test[SerializeParquet] |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |80.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> test.py::test[ErrorDistanceInvalidFormat] [GOOD] >> test.py::test[ErrorDistanceSameFormat] |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> ydb-tests-datashard-s3::import_test [GOOD] |80.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/import_test >> ydb-tests-datashard-s3::import_test [GOOD] |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |80.3%| [TS] {RESULT} ydb/tests/datashard/s3/import_test |80.3%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd >> test.py::test[intersect] [GOOD] >> test.py::test[run_optimize] |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |80.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> test.py::test[ErrorDistanceSameFormat] [GOOD] >> test.py::test[ErrorDistanceSameSize] |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/kqprun |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |80.4%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> test.py::test[run_optimize] [GOOD] >> test.py::test[serialize_deserialize] >> test.py::test[ErrorDistanceSameSize] [GOOD] >> test.py::test[ErrorDistanceSameTag] >> test.py::test[join-filter_joined-off-Results] >> test.py::test[join-filter_joined-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns1--Results] >> test.py::test[bigdate-tz_table_pull--Results] >> test.py::test[ErrorDistanceSameTag] [GOOD] >> test.py::test[SerializeParquet] [GOOD] >> test.py::test[SerializeParquetPartitioned] |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/client/ut/ydb-core-client-ut |80.4%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut >> test.py::test[ErrorFloatFromBinaryStringBitVector] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-ForceBlocks] >> test.py::test[select-one_unlabeled_column-default.txt-Results] >> test.py::test[lineage-window_session-default.txt-Results] >> test.py::test[pg-tpcds-q62-default.txt-Results] >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[optimizers-nonselected_direct_row--Results] >> test.py::test[optimizers-field_subset_for_multiusage--ForceBlocks] >> test.py::test[weak_field-weak_field_strict--Results] >> test.py::test[count-boolean_count--Results] >> test.py::test[insert-select_relabel-default.txt-Results] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[expr-empty_iterator--Results] >> test.py::test[schema-select_all_inferschema_limit--ForceBlocks] >> test.py::test[insert-anonymous_tables-default.txt-Results] >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] >> test.py::test[join-left_cast_to_string-off-Results] >> test.py::test[produce-reduce_with_python_input_stream--ForceBlocks] >> test.py::test[join-left_cast_to_string-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Results] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[action-evaluate_match_type-default.txt-Results] >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] >> test.py::test[pg-tpcds-q47-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_udf--Results] >> test.py::test[select-logical_ops-default.txt-Results] >> test.py::test[action-action_eval_cluster_table--ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] >> test.py::test[join-lookupjoin_bug7646_csee--ForceBlocks] >> test.py::test[action-eval_if-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] >> test.py::test[ErrorFloatFromBinaryStringBitVector] [GOOD] >> test.py::test[action-action_eval_cluster_table--Results] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] >> test.py::test[type_v3-json--Results] >> test.py::test[ErrorFloatFromBinaryStringEmpty] >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> test.py::test[insert_monotonic-not_all_fail--ForceBlocks] >> test.py::test[action-action_eval_cluster_and_table-default.txt-ForceBlocks] >> test.py::test[hor_join-merge_multiouts_part--ForceBlocks] >> test.py::test[hor_join-merge_multiouts_part--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_part--Results] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[hor_join-merge_multiouts_part--Results] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix--Results] >> test.py::test[hor_join-sorted_out_mix--Results] [SKIPPED] >> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] >> test.py::test[serialize_deserialize] [GOOD] >> test.py::test[union] >> 
test.py::test[ErrorFloatFromBinaryStringEmpty] [GOOD] >> test.py::test[ErrorFloatFromBinaryStringInvalid] >> test.py::test[produce-reduce_with_python_input_stream--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_python_input_stream--Results] >> test.py::test[produce-reduce_with_python_input_stream--Results] [GOOD] >> test.py::test[produce-reduce_with_python_row--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_row--Results] [SKIPPED] >> test.py::test[sampling-insert--ForceBlocks] |80.4%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o |80.4%| [PY] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/objcopy_533f06087e794c7af638ea75dc.o >> test.py::test[insert_monotonic-not_all_fail--ForceBlocks] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-ForceBlocks] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] >> test.py::test[select-one_unlabeled_column-default.txt-Results] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] >> test.py::test[insert_monotonic-not_all_fail--Results] [GOOD] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-ForceBlocks] >> test.py::test[bigdate-tz_table_pull--Results] [GOOD] >> test.py::test[binding-insert_binding--Results] >> test.py::test[ErrorFloatFromBinaryStringInvalid] [GOOD] >> test.py::test[EuclideanDistance] >> test.py::test[pg-tpcds-q62-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[SerializeParquetPartitioned] [GOOD] >> test.py::test[SerializeTSVWithNames] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-ForceBlocks] [GOOD] |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] |80.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/fqrun/fqrun |80.4%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[weak_field-weak_field_strict--Results] [GOOD] >> test.py::test[insert-select_relabel-default.txt-Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> test.py::test[weak_field-weak_field_type-default.txt-Results] >> test.py::test[select-logical_ops-default.txt-Results] [GOOD] >> test.py::test[select-match_clause--Results] >> test.py::test[schema-select_all_inferschema_limit--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] >> test.py::test[union] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] |80.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |80.4%| 
[AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |80.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |80.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_column_fail--Results] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_subfields--Results] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf--Results] >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted-Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys--Results] [SKIPPED] >> test.py::test[ql_filter-integer_eval--Results] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] [SKIPPED] >> test.py::test[join-simple_columns_partial--Results] >> test.py::test[action-evaluate_match_type-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_queries--Results] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> test.py::test[action-eval_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_and_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] >> test.py::test[insert-anonymous_tables-default.txt-Results] [GOOD] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> test.py::test[insert-fail_read_view_after_modify--Results] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] >> test.py::test[action-eval_if-default.txt-Results] >> test.py::test[type_v3-json--Results] [GOOD] >> test.py::test[type_v3-non_strict--Results] >> test.py::test[join-flatten_columns1--Results] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-Results] >> test.py::test[join-full_trivial_udf_call-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only--Results] >> test.py::test[action-action_eval_cluster_table--Results] [GOOD] >> test.py::test[action-empty_do-default.txt-Results] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_sampling-sorted-ForceBlocks] >> test.py::test[count-boolean_count--Results] [GOOD] |80.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/udfs/common/roaring/test/py3test >> test.py::test[union] [GOOD] >> test.py::test[action-action_eval_cluster_table--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_table--Results] >> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-yield_on-default.txt-Results] >> test.py::test[count-count_by_nulls--Results] >> test.py::test[optimizers-field_subset_for_multiusage--ForceBlocks] [GOOD] >> 
test.py::test[optimizers-field_subset_for_multiusage--Results] |80.5%| [TM] {RESULT} ydb/library/yql/udfs/common/roaring/test/py3test >> test.py::test[schema-select_all_inferschema_limit--Results] [GOOD] >> test.py::test[schema-select_field-schema-ForceBlocks] >> test.py::test[expr-non_persistable_group_by_column_fail--Results] [GOOD] >> test.py::test[flatten_by-flatten_list--Results] >> test.py::test[SerializeTSVWithNames] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> test.py::test[binding-insert_binding--Results] [GOOD] >> test.py::test[binding-table_concat_binding-default.txt-Results] >> test.py::test[YqlType] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-ForceBlocks] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] [GOOD] >> test.py::test[select-struct_members-default.txt-Results] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Results] >> test.py::test[insert-fail_read_view_after_modify--Results] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt-Results] >> test.py::test[insert-keepmeta-with_view-Results] [SKIPPED] >> test.py::test[insert-override-proto-Results] >> test.py::test[lineage-select_group_by_all-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q47-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-ForceBlocks] >> test.py::test[aggr_factory-corellation-default.txt-ForceBlocks] >> test.py::test[lineage-select_group_by_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] >> test.py::test[lineage-unordered_subquery-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct--Results] >> test.py::test[lineage-unordered_subquery-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-keep_sort_with_renames--ForceBlocks] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Results] >> test.py::test[action-action_eval_cluster_table--Results] [GOOD] >> test.py::test[action-eval_filter--ForceBlocks] >> test.py::test[sampling-insert--ForceBlocks] [GOOD] >> test.py::test[sampling-insert--Results] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay_yt/query_replay_yt |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |80.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> test.py::test[hor_join-yield_on-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_join--ForceBlocks] >> 
test.py::test[join-lookupjoin_bug7646_csee--ForceBlocks] [GOOD] >> test.py::test[action-evaluate_queries--Results] [GOOD] >> test.py::test[EuclideanDistance] [GOOD] >> test.py::test[select-match_clause--Results] [GOOD] >> test.py::test[action-export_action--Results] >> test.py::test[select-swap_columns-default.txt-Results] >> test.py::test[InnerProductSimilarity] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee--Results] >> test.py::test[binding-anon_table_binding-default.txt-Results] [GOOD] >> test.py::test[insert-two_input_tables--Results] >> test.py::test[action-empty_do-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder--Results] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--Results] >> test.py::test[YqlType] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag--Results] >> test.py::test[binding-bind_select-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_single_equals--Results] |80.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/udfs/common/clickhouse/client/test/py3test >> test.py::test[YqlType] [GOOD] |80.6%| [TM] {RESULT} ydb/library/yql/udfs/common/clickhouse/client/test/py3test >> test.py::test[insert_monotonic-truncate_and_append-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] >> test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_udf--Results] [GOOD] >> test.py::test[optimizers-field_subset_for_multiusage--Results] [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce--ForceBlocks] [SKIPPED] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] >> test.py::test[optimizers-fuse_map_mapreduce--Results] [SKIPPED] >> test.py::test[optimizers-group_visit_lambdas--ForceBlocks] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-anon_clash--Results] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> test.py::test[sampling-insert--Results] [GOOD] >> test.py::test[schema-insert_sorted-schema-ForceBlocks] >> test.py::test[binding-table_concat_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int16--Results] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_filter--Results] >> test.py::test[schema-select_field-schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] >> test.py::test[join-inner_on_key_only--Results] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by--Results] >> test.py::test[aggregate-group_by_rollup_udf--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] >> test.py::test[count-count_by_nulls--Results] 
[GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[insert-override-proto-Results] [GOOD] >> test.py::test[insert-replace_inferred_op--Results] >> test.py::test[produce-reduce_multi_in_sampling-sorted-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> test.py::test[select-struct_members-default.txt-Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] >> test.py::test[action-eval_for_over_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] [GOOD] >> test.py::test[join-cbo_7tables--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_7tables--Results] [SKIPPED] >> test.py::test[join-equi_join_by_expr--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_csee--Results] [GOOD] >> test.py::test[join-lookupjoin_bug8533--ForceBlocks] >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_mix2--ForceBlocks] >> test.py::test[action-eval_folder--Results] [GOOD] >> test.py::test[action-eval_like--Results] >> test.py::test[pg-tpcds-q83-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-Results] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/tool |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |80.6%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool >> test.py::test[blocks-string_with--ForceBlocks] >> test.py::test[join-mapjoin_with_empty_struct--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-Results] >> test.py::test[ql_filter-integer_single_equals--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align3-off-Results] >> test.py::test[select-swap_columns-default.txt-Results] [GOOD] >> test.py::test[sampling-reduce--Results] [SKIPPED] >> test.py::test[sampling-system_sampling--Results] [SKIPPED] >> test.py::test[schema-append_to_desc--Results] >> test.py::test[binding-bind_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-bind_select-default.txt-Results] >> test.py::test[join-mergejoin_force_align3-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted--Results] >> test.py::test[optimizers-keep_sort_with_renames--ForceBlocks] [GOOD] >> test.py::test[optimizers-keep_sort_with_renames--Results] >> test.py::test[select-unlabeled_1000--Results] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> test.py::test[InnerProductSimilarity] [GOOD] >> test.py::test[Int8Serialization] >> test.py::test[action-eval_filter--ForceBlocks] [GOOD] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[insert-udf_empty--Results] >> test.py::test[flatten_by-flatten_list--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage--Results] [SKIPPED] >> 
test.py::test[hor_join-out_range-default.txt-Results] >> test.py::test[action-eval_filter--Results] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] [GOOD] >> test.py::test[action-eval_skip_take--ForceBlocks] >> test.py::test[blocks-combine_hashed_count--Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Results] >> test.py::test[pg-tpcds-q83-default.txt-Results] [GOOD] >> test.py::test[pragma-config_exec--ForceBlocks] >> test.py::test[blocks-date_add_interval--Results] >> test.py::test[type_v3-replace_diff_layout--Results] [GOOD] >> test.py::test[udf-python_struct--Results] [SKIPPED] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[result_types-pg-default.txt-ForceBlocks] >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> test.py::test[binding-drop_binding--ForceBlocks] >> test.py::test[action-export_action--Results] [GOOD] >> test.py::test[action-unwrap_runtime_fail_with_column_message--Results] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee--Results] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[pg-aggregate_combine--Results] >> test.py::test[blocks-nested_optionals--Results] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[Int8Serialization] [GOOD] >> test.py::test[LazyListSerialization] >> test.py::test[action-eval_table_with_view-default.txt-ForceBlocks] >> test.py::test[blocks-add_int16--Results] [GOOD] >> test.py::test[blocks-add_int32--Results] >> test.py::test[optimizers-group_visit_lambdas--ForceBlocks] [GOOD] >> test.py::test[optimizers-group_visit_lambdas--Results] |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> test.py::test[insert-replace_inferred_op--Results] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-Results] >> test.py::test[select-substring_v1-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> test.py::test[optimizers-keep_sort_with_renames--Results] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-ForceBlocks] >> test.py::test[join-simple_columns_partial--Results] [GOOD] >> test.py::test[action-eval_like--Results] [GOOD] >> test.py::test[action-eval_regexp--Results] >> test.py::test[LazyListSerialization] [GOOD] >> test.py::test[join-star_join_semionly--Results] >> test.py::test[join-lookupjoin_bug8533--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug8533--Results] >> test.py::test[ListSerialization] >> 
test.py::test[schema-insert_sorted-schema-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix2--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix2--Results] >> test.py::test[tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] >> test.py::test[select-corr_name_in_select-default.txt-Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[join-join_no_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-equi_join_by_expr--ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-join_without_column--Results] >> test.py::test[action-unwrap_runtime_fail_with_column_message--Results] [GOOD] >> test.py::test[join-equi_join_by_expr--Results] >> test.py::test[aggr_factory-list--Results] >> test.py::test[schema-insert_sorted-schema-Results] >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-ForceBlocks] >> test.py::test[schema-append_to_desc--Results] [GOOD] >> test.py::test[schema-copy-yamred_dsv_raw-Results] >> test.py::test[select-unlabeled_1000--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] >> test.py::test[blocks-string_with--ForceBlocks] [GOOD] >> test.py::test[blocks-string_with--Results] >> test.py::test[optimizers-group_visit_lambdas--Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--ForceBlocks] >> test.py::test[insert-udf_empty--Results] [GOOD] >> test.py::test[insert-yql-14538--Results] >> test.py::test[action-eval_skip_take--ForceBlocks] [GOOD] >> test.py::test[action-eval_skip_take--Results] >> test.py::test[binding-drop_binding--ForceBlocks] [GOOD] >> test.py::test[binding-drop_binding--Results] >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--Results] >> test.py::test[ListSerialization] [GOOD] >> test.py::test[ManhattanDistance] >> test.py::test[schema-user_schema_mix2--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--ForceBlocks] >> test.py::test[schema-insert_sorted-schema-Results] [GOOD] >> test.py::test[schema-select_fields_inferschema--ForceBlocks] >> test.py::test[join-lookupjoin_bug8533--Results] [GOOD] >> test.py::test[join-lookupjoin_bug8533-off-ForceBlocks] >> test.py::test[blocks-nested_optionals--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] >> test.py::test[blocks-add_int32--Results] [GOOD] >> test.py::test[blocks-add_int8--Results] >> test.py::test[action-eval_table_with_view-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] >> test.py::test[pragma-config_exec--ForceBlocks] [GOOD] >> test.py::test[pragma-config_exec--Results] >> test.py::test[result_types-pg-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-pg-default.txt-Results] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt-ForceBlocks] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] [GOOD] >> test.py::test[blocks-string_with--Results] [GOOD] >> test.py::test[join-equi_join_by_expr--Results] [GOOD] >> test.py::test[blocks-struct_type--ForceBlocks] >> test.py::test[join-inner_all_right-off-ForceBlocks] >> 
test.py::test[union_all-union_all_fields-default.txt-Results] >> test.py::test[action-eval_skip_take--Results] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-ForceBlocks] >> test.py::test[action-eval_regexp--Results] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] >> test.py::test[insert-select_after_replace-default.txt-Results] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] >> test.py::test[schema-copy-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] >> test.py::test[action-eval_table_with_view-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-ForceBlocks] >> test.py::test[tpch-q9-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_csee--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Results] >> test.py::test[result_types-pg-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--ForceBlocks] >> test.py::test[pg-aggregate_combine--Results] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] >> test.py::test[select-where_with_lambda--Results] >> test.py::test[pragma-config_exec--Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull--ForceBlocks] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull--Results] [SKIPPED] >> test.py::test[produce-process_lambda_opt_args-default.txt-ForceBlocks] >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> test.py::test[schema-yamred_dsv_select_from_dict--ForceBlocks] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[join-join_without_column--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] >> test.py::test[schema-select_fields_inferschema--ForceBlocks] [GOOD] >> test.py::test[schema-select_fields_inferschema--Results] >> test.py::test[join-star_join_semionly--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] >> test.py::test[blocks-add_int8--Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter--Results] >> test.py::test[join-lookupjoin_bug8533-off-ForceBlocks] [GOOD] >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] >> test.py::test[join-lookupjoin_bug8533-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi-off-ForceBlocks] >> test.py::test[ManhattanDistance] [GOOD] >> test.py::test[NullForwarding] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> 
test.py::test[select-backtick_with_escapes-default.txt-ForceBlocks] >> test.py::test[binding-named_node_corr_names-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--ForceBlocks] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] >> test.py::test[binding-named_node_corr_names-default.txt-Results] >> test.py::test[aggr_factory-histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-Results] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] >> test.py::test[hor_join-out_range-default.txt-Results] [GOOD] >> test.py::test[hor_join-row_num_per_sect--Results] >> test.py::test[schema-select_fields_inferschema--Results] [GOOD] >> test.py::test[join-inner_all_right-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_all_right-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only--ForceBlocks] >> test.py::test[select-append_to_value_1000--ForceBlocks] [SKIPPED] >> test.py::test[select-append_to_value_1000--Results] [SKIPPED] >> test.py::test[select-opt_list_access-default.txt-ForceBlocks] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] >> test.py::test[blocks-struct_type--ForceBlocks] [GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_pure--Results] >> test.py::test[union_all-union_all_trivial-default.txt-Results] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--ForceBlocks] >> test.py::test[window-full/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-full/leadlag--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] [GOOD] >> test.py::test[NullForwarding] [GOOD] >> test.py::test[OptionalAutoUnpacking] >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] >> test.py::test[binding-named_node_corr_names-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_key_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] >> test.py::test[select-where_with_lambda--Results] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[pg-tpch-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] >> test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join--Results] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] [GOOD] >> 
test.py::test[insert-use_anon_table_before_commit_fail--Results] >> test.py::test[sampling-direct_read--ForceBlocks] [GOOD] >> test.py::test[sampling-direct_read--Results] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Results] >> test.py::test[produce-process_lambda_opt_args-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] >> test.py::test[action-eval_unresolved_type_arg-default.txt-Results] [GOOD] >> test.py::test[action-eval_values_output_table_subquery--ForceBlocks] >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[OptionalAutoUnpacking] [GOOD] >> test.py::test[Uint8Serialization] >> test.py::test[aggr_factory-list--Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] [GOOD] >> test.py::test[blocks-top_sort_one_asc--Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] >> test.py::test[blocks-struct_type--Results] [GOOD] >> test.py::test[column_group-hint_diff_grp_fail2--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail2--Results] >> test.py::test[aggr_factory-histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-ForceBlocks] >> test.py::test[optimizers-unordered_over_sort--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort--Results] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail2--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Results] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--Results] [SKIPPED] >> test.py::test[order_by-literal_single_item_sort--ForceBlocks] >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Results] >> test.py::test[insert_monotonic-not_all_fail--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] >> test.py::test[order_by-SortByTwoFields--Results] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] >> test.py::test[column_group-min_group-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols--ForceBlocks] >> test.py::test[join-mapjoin_with_empty_read--Results] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] >> test.py::test[blocks-combine_all_count_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-select_all_inferschema--Results] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-direct_read-dynamic-ForceBlocks] >> test.py::test[tpch-q9-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] >> test.py::test[join-lookupjoin_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi-off-Results] >> test.py::test[union_all-union_all_trivial-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--Results] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] 
[GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--ForceBlocks] >> test.py::test[tpch-q9-default.txt-Results] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Results] >> test.py::test[join-inner_on_key_only--ForceBlocks] [GOOD] >> test.py::test[join-inner_on_key_only--Results] >> test.py::test[join-lookupjoin_semi-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty--ForceBlocks] >> test.py::test[Uint8Serialization] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] [GOOD] >> test.py::test[select-opt_list_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Results] >> test.py::test[produce-process_multi_in_single_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_in_single_out--Results] [SKIPPED] >> test.py::test[produce-process_with_python_stream-empty-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] >> test.py::test[action-evaluate_pure--Results] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] >> test.py::test[pg-join_using_multiple2--Results] [GOOD] >> test.py::test[pg-name--Results] |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/udfs/common/knn/test/py3test >> test.py::test[Uint8Serialization] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] [GOOD] >> test.py::test[select-discard-default.txt-ForceBlocks] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] >> test.py::test[insert_monotonic-non_existing_fail--Results] [GOOD] >> test.py::test[join-alias_where_group--Results] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] |80.7%| [TM] {RESULT} ydb/library/yql/udfs/common/knn/test/py3test >> test.py::test[in-in_ansi_join--ForceBlocks] [GOOD] >> test.py::test[in-in_ansi_join--Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] >> test.py::test[join-inner_on_key_only--Results] [GOOD] >> test.py::test[join-left_cast_to_string-off-ForceBlocks] >> test.py::test[hor_join-row_num_per_sect--Results] [GOOD] >> test.py::test[hor_join-yield_off--Results] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Results] [SKIPPED] >> test.py::test[in-in_with_list_dict-default.txt-Results] >> test.py::test[join-join_without_correlation_and_dict_access--Results] [GOOD] >> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_nested--Results] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-off-Results] >> 
test.py::test[action-eval_values_output_table_subquery--ForceBlocks] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--ForceBlocks] >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty--Results] >> test.py::test[select-optional_as_warn-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-Results] >> test.py::test[action-eval_values_output_table_subquery--Results] >> test.py::test[select-opt_list_access-default.txt-Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] >> test.py::test[select-refselect-1000-Results] [SKIPPED] >> test.py::test[select-scalar_subquery-default.txt-Results] >> test.py::test[select-sample_limit_recordindex--ForceBlocks] >> test.py::test[join-yql-14829_leftonly-off-Results] [SKIPPED] >> test.py::test[join-yql-4275-off-Results] [SKIPPED] >> test.py::test[json-jsondocument/select--Results] >> test.py::test[join-lookupjoin_semi_empty--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_empty--Results] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[schema-select_reordered-default.txt-Results] >> test.py::test[blocks-top_sort_one_asc--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] >> test.py::test[column_order-insert_with_reorder_cols--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols--Results] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[sampling-direct_read-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-direct_read-dynamic-Results] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] >> test.py::test[action-eval_values_output_table_subquery--Results] [GOOD] >> test.py::test[action-insert_after_eval--ForceBlocks] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_semi_to_inner-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-nested_semi_join--Results] [GOOD] >> test.py::test[join-premap_common_multiparents-off-Results] [SKIPPED] >> test.py::test[order_by-literal_single_item_sort--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group-off-Results] [SKIPPED] >> test.py::test[join-premap_context_dep--Results] >> test.py::test[join-lookupjoin_semi_empty--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-ForceBlocks] >> 
test.py::test[join-premap_common_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_no_premap-off-Results] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitand-default.txt-Results] >> test.py::test[produce-process_with_python_stream-empty-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_python_stream-empty-Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] >> test.py::test[window-full/leadlag--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed--Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-ForceBlocks] >> test.py::test[pg-name--Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] >> test.py::test[sampling-direct_read-dynamic-Results] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_empty--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] >> test.py::test[join-premap_nonseq_flatmap--Results] >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] >> test.py::test[column_order-insert_with_reorder_cols--Results] [GOOD] >> test.py::test[produce-process_with_python_stream-empty-Results] [GOOD] >> test.py::test[column_order-join--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_with_udf_validate-default.txt-ForceBlocks] >> test.py::test[column_order-join--Results] >> test.py::test[aggr_factory-hll-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> test.py::test[aggregate-compare_by_tuple--ForceBlocks] >> test.py::test[aggr_factory-hll-default.txt-Results] >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[distinct-distinct_groupby-default.txt-ForceBlocks] >> test.py::test[join-left_cast_to_string-off-ForceBlocks] [GOOD] >> test.py::test[join-left_cast_to_string-off-Results] [SKIPPED] >> test.py::test[join-left_null_literal--ForceBlocks] >> test.py::test[in-in_with_list_dict-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta_view_fail--Results] >> test.py::test[json-jsondocument/select--Results] [GOOD] >> test.py::test[key_filter-contains-default.txt-Results] >> test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] [GOOD] >> test.py::test[schema-select_reordered-default.txt-Results] [GOOD] >> test.py::test[schema-select_yamr_fields--Results] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] [GOOD] >> test.py::test[tpch-q10-default.txt-Results] >> test.py::test[blocks-combine_all_max_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Results] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> test.py::test[select-sample_limit_recordindex--ForceBlocks] [GOOD] >> test.py::test[select-sample_limit_recordindex--Results] 
>> test.py::test[blocks-date_less_scalar--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Results] >> test.py::test[insert-keepmeta_view_fail--Results] [GOOD] >> test.py::test[insert-merge_publish--Results] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] >> test.py::test[blocks-combine_all_minmax_nested--Results] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables--Results] [SKIPPED] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [GOOD] >> test.py::test[udf-python_script--ForceBlocks] >> test.py::test[join-alias_where_group--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup--Results] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] >> test.py::test[order_by-order_by_expr_simple--Results] >> test.py::test[blocks-combine_all_max_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_min--ForceBlocks] >> test.py::test[order_by-order_by_mul_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[expr-len--Results] >> test.py::test[select-sample_limit_recordindex--Results] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] >> test.py::test[action-insert_after_eval--ForceBlocks] [GOOD] >> test.py::test[action-insert_after_eval--Results] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] >> test.py::test[produce-process_with_udf_validate-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap--ForceBlocks] >> test.py::test[schema-select_yamr_fields--Results] [GOOD] >> test.py::test[select-append_to_value_1000--Results] [SKIPPED] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] >> test.py::test[distinct-distinct_groupby-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Results] >> test.py::test[key_filter-contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> test.py::test[aggr_factory-hll-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-ForceBlocks] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> 
test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[join-left_null_literal--ForceBlocks] [GOOD] >> test.py::test[join-left_null_literal--Results] >> test.py::test[select-discard-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-discard-default.txt-Results] >> test.py::test[pg-select_qstarref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Results] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-ordered_fill--ForceBlocks] >> test.py::test[join-bush_dis_in_in-off-Results] [SKIPPED] >> test.py::test[join-cbo_7tables_only_common_join--Results] [SKIPPED] >> test.py::test[join-equi_join_two_mult_keys--Results] >> test.py::test[join-premap_context_dep--Results] [GOOD] >> test.py::test[join-prune_keys--Results] >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-prune_keys--ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary--Results] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-ForceBlocks] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--ForceBlocks] >> test.py::test[action-insert_after_eval--Results] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--ForceBlocks] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key-off-Results] [SKIPPED] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] >> test.py::test[column_group-hint_non_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-length-single-Results] [SKIPPED] >> test.py::test[column_group-respull--Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> test.py::test[join-left_null_literal--Results] [GOOD] >> test.py::test[join-left_null_literal-off-ForceBlocks] >> test.py::test[udf-python_script--ForceBlocks] [GOOD] >> test.py::test[udf-python_script--Results] >> test.py::test[aggregate-compare_by_tuple--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Results] >> test.py::test[aggr_factory-bitand-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] [GOOD] >> 
test.py::test[pragma-file-default.txt-Results] |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[join-split_to_list_as_key-off-Results] [SKIPPED] >> test.py::test[distinct-distinct_groupby-default.txt-Results] [GOOD] >> test.py::test[epochs-use_and_drop_anonymous--ForceBlocks] >> test.py::test[expr-len--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] >> test.py::test[epochs-use_and_drop_anonymous--ForceBlocks] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Results] [SKIPPED] >> test.py::test[expr-len--ForceBlocks] >> test.py::test[select-select_all_from_concat-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-Results] >> test.py::test[insert-merge_publish--Results] [GOOD] >> test.py::test[insert-override--Results] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Results] >> test.py::test[action-mixed_eval_typeof_world1--ForceBlocks] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-ForceBlocks] >> test.py::test[udf-python_script--Results] [GOOD] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> test.py::test[select-select_all_from_concat-default.txt-Results] [GOOD] >> test.py::test[select-struct_access_without_table_name--ForceBlocks] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min--Results] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[aggregate-group_by_session_only_distinct--Results] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Results] >> test.py::test[in-in_ansi_join--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] >> test.py::test[order_by-ordered_fill--ForceBlocks] [GOOD] >> test.py::test[order_by-ordered_fill--Results] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[blocks-decimal_comparison--Results] >> test.py::test[window-generic/aggregations_mixed--Results] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--Results] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] [GOOD] >> test.py::test[select-create_structures-default.txt-Results] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-discard_reduce_lambda--Results] [SKIPPED] >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] >> test.py::test[produce-native_desc_reduce_with_presort--Results] [SKIPPED] >> test.py::test[produce-process_streaming-default.txt-Results] >> test.py::test[join-equi_join_two_mult_keys--Results] [GOOD] >> test.py::test[join-grace_join1-map-Results] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] 
[GOOD] >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[order_by-sort_with_take--ForceBlocks] >> test.py::test[join-join_comp_map_table-off-Results] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--Results] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] >> test.py::test[join-premap_common_multiparents_no_premap--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] >> test.py::test[join-prune_keys--ForceBlocks] [GOOD] >> test.py::test[join-prune_keys--Results] >> test.py::test[pg-select_starref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Results] >> test.py::test[aggregate-compare_by_tuple--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--ForceBlocks] >> test.py::test[join-left_null_literal-off-ForceBlocks] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] >> test.py::test[join-left_null_literal-off-Results] >> test.py::test[join-left_null_literal-off-Results] [SKIPPED] >> test.py::test[join-left_only_semi_and_other--ForceBlocks] >> test.py::test[join-mergejoin_big_primary--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Results] >> test.py::test[insert-override--Results] [GOOD] >> test.py::test[insert-override-from_sorted_calc-Results] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--ForceBlocks] >> test.py::test[select-discard-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-ForceBlocks] >> test.py::test[blocks-combine_all_min--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--ForceBlocks] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] >> test.py::test[expr-len--ForceBlocks] [GOOD] >> test.py::test[expr-len--Results] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-Results] [SKIPPED] >> test.py::test[join-left_all-off-Results] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test.py::test[order_by-order_by_tablepath_column--Results] [GOOD] >> test.py::test[order_by-ordered_fill--Results] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] >> test.py::test[join-prune_keys--Results] [GOOD] >> test.py::test[join-pullup_context_dep-off-Results] >> test.py::test[join-pullup_context_dep-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross--Results] >> test.py::test[select-struct_access_without_table_name--ForceBlocks] [GOOD] >> test.py::test[select-struct_access_without_table_name--Results] >> test.py::test[expr-len--Results] [GOOD] >> 
test.py::test[expr-yql-10180-default.txt-ForceBlocks] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-ForceBlocks] >> test.py::test[action-nested_rewrite_io-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] [GOOD] >> test.py::test[join-pullup_rownumber--ForceBlocks] >> test.py::test[action-nested_rewrite_io-default.txt-Results] >> test.py::test[blocks-combine_hashed_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] [GOOD] >> test.py::test[select-deep_udf_call--ForceBlocks] >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test.py::test[key_filter-part_key_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-ranges--Results] >> test.py::test[join-prune_keys--Results] [GOOD] >> test.py::test[join-pullup_random--ForceBlocks] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] [GOOD] >> test.py::test[produce-reduce_typeinfo--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_typeinfo--Results] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps--Results] [SKIPPED] >> test.py::test[pg-select_unionall_self-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-Results] >> test.py::test[sampling-subquery_limit-default.txt-ForceBlocks] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] >> test.py::test[select-struct_access_without_table_name--Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-ForceBlocks] >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt-Results] >> test.py::test[order_by-sort_with_take--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take--Results] >> test.py::test[action-nested_rewrite_io-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-ForceBlocks] >> test.py::test[insert-override-from_sorted_calc-Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-Results] |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[udf-python_script_from_file--Results] [SKIPPED] >> test.py::test[udf-udaf_lambda-default.txt-Results] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_count--Results] >> test.py::test[aggregate-list_nullable--Results] >> test.py::test[order_by-warn_offset_wo_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Results] >> 
test.py::test[join-mergejoin_big_primary_unique--Results] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] >> test.py::test[select-refselect-1000-ForceBlocks] [GOOD] >> test.py::test[select-refselect-1000-Results] >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--ForceBlocks] [GOOD] >> test.py::test[join-left_only_semi_and_other--ForceBlocks] [GOOD] >> test.py::test[join-left_only_semi_and_other--Results] >> test.py::test[aggregate-group_by_gs_grouping--Results] >> test.py::test[aggregate-compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-count_distinct_with_filter--Results] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] >> test.py::test[order_by-sort_with_take--Results] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Results] >> test.py::test[aggr_factory-bottom-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] >> test.py::test[pg-aggregate_combine_all--ForceBlocks] >> test.py::test[expr-yql-10180-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Results] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] >> test.py::test[select-refselect-1000-Results] [GOOD] >> test.py::test[select-swap_columns-default.txt-ForceBlocks] >> test.py::test[order_by-warn_offset_wo_sort--Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] >> test.py::test[aggregate-agg_phases_table3-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] >> test.py::test[select-deep_udf_call--ForceBlocks] [GOOD] >> test.py::test[select-deep_udf_call--Results] >> test.py::test[blocks-combine_hashed_count--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--ForceBlocks] >> test.py::test[window-lagging/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-mixed/aggregations--Results] >> test.py::test[join-pullup_random--ForceBlocks] [GOOD] >> test.py::test[join-pullup_random--Results] >> test.py::test[join-anyjoin_common_dup--Results] [GOOD] >> test.py::test[key_filter-ranges--Results] [GOOD] >> test.py::test[join-convert_key-off-Results] [SKIPPED] >> test.py::test[join-force_merge_join-default.txt-Results] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[join-pullup_rownumber--ForceBlocks] [GOOD] >> test.py::test[join-pullup_rownumber--Results] >> test.py::test[expr-yql-10180-default.txt-Results] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q08-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q18-default.txt-Results] >> test.py::test[sampling-subquery_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_limit-default.txt-Results] >> 
test.py::test[produce-reduce_all_list-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] >> test.py::test[join-pullup_cross--Results] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] >> test.py::test[type_v3-append_diff_flags--Results] >> test.py::test[insert-override-from_sorted_desc-Results] [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[join-pullup_left--Results] >> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval--ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> test.py::test[flatten_by-flatten_member_is_struct--Results] [GOOD] >> test.py::test[hor_join-group_sampling--Results] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test.py::test[in-basic_in-default.txt-Results] >> test.py::test[select-deep_udf_call--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[join-left_semi_with_other--Results] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[aggr_factory-bitxor-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Results] >> test.py::test[join-left_trivial-off-ForceBlocks] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] [GOOD] >> test.py::test[blocks-compare--Results] >> test.py::test[udf-udaf_lambda-default.txt-Results] [GOOD] >> test.py::test[udf-udaf_short--Results] >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test.py::test[join-left_only_with_other-off-ForceBlocks] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test.py::test[join-pullup_rownumber--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-ForceBlocks] |80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[select-deep_udf_call--Results] [GOOD] |80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_nullable--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval--ForceBlocks] [GOOD] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-star_join_inners--ForceBlocks] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] >> test.py::test[aggregate-group_by_hop_bad_interval--Results] [GOOD] >> 
test.py::test[aggregate-group_by_rollup_column_ref_same_names--ForceBlocks] >> test.py::test[select-swap_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-swap_columns-default.txt-Results] >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] >> test.py::test[datetime-date_tz_table_sort_desc--Results] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] >> test.py::test[pg-aggregate_combine_all--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[select-table_content_from_sort_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] [GOOD] >> test.py::test[window-current/session--Results] >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] >> test.py::test[produce-reduce_all_expr-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream--ForceBlocks] >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> test.py::test[type_v3-singulars--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script--Results] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[pg-tpcds-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-Results] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-select_relabel-default.txt-ForceBlocks] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] [GOOD] >> TFlatTest::Ls >> test.py::test[insert_monotonic-break_unique_fail--Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in--Results] >> test.py::test[select-swap_columns-default.txt-Results] [GOOD] >> test.py::test[select-to_dict-default.txt-ForceBlocks] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] >> test.py::test[pg-select_where-default.txt-ForceBlocks] >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] >> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Results] >> test.py::test[key_filter-string_with_ff-default.txt-Results] 
[GOOD] >> test.py::test[key_filter-tzdate--Results] >> test.py::test[produce-reduce_all_with_python_input_stream--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> TFlatTest::LsPathId [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] [GOOD] >> test.py::test[sampling-map-dynamic-ForceBlocks] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-force_merge_join-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Results] [GOOD] >> test.py::test[join-full_equal_not_null--Results] >> test.py::test[aggr_factory-min_by-default.txt-ForceBlocks] >> test.py::test[in-basic_in-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-05-05T09:41:31.655973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894488424902838:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:31.656217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b28/r3tmp/tmp5p2vST/pdisk_1.dat 2025-05-05T09:41:31.707622Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15731 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:31.785762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:31.785804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:31.786718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:31.786949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746438091832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746438091832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746438091846 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... 
(TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746438091846 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... (TRUNCATED) 2025-05-05T09:41:31.799340Z node 1 :TX_PROXY ERROR: Actor# [1:7500894488424903452:2322] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155 TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746438091832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746438091846 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... 
TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746438091832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1746438091846 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746438091860 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... (TRUNCATED) 2025-05-05T09:41:32.059248Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894492404731048:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:32.061465Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b28/r3tmp/tmpH3YO1V/pdisk_1.dat 2025-05-05T09:41:32.077279Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6769 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:32.164106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:32.164151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:32.164626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:32.165120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... >> test.py::test[join-left_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-left_trivial-off-Results] >> test.py::test[join-left_trivial-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_rename--ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[udf-udaf_short--Results] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--Results] >> test.py::test[join-pullup_left-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming--Results] |80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] >> test.py::test[join-left_only_with_other-off-ForceBlocks] [GOOD] >> test.py::test[blocks-compare--Results] [GOOD] >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[blocks-date_add_interval_scalar--Results] >> test.py::test[pg-tpcds-q32-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-Results] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> test.py::test[join-star_join_inners--ForceBlocks] [GOOD] >> 
test.py::test[join-star_join_inners--Results] >> test.py::test[udf-named_args_for_script--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] [GOOD] >> test.py::test[udf-regexp_udf--Results] >> test.py::test[join-star_join_semionly-off-ForceBlocks] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:34.132104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:34.132124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:34.132128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:34.132131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:34.132141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:34.132143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:34.132150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:34.132160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:34.132223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:34.132275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:34.141041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:34.141064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:34.143972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:34.144680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:34.144719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:34.146094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:34.146140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:34.146229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:34.146480Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:34.147267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:34.147538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:34.147547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:34.147561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:34.147566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:34.147570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:34.147599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.148809Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:34.162710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:34.162796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.162859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:34.162930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:34.162942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.163741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:34.163766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:34.163818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.163826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:34.163830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:34.163834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:34.164199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.164208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:34.164212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:34.164464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.164472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.164475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:34.164481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:34.164937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:34.165284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:34.165318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:34.165468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:34.165486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:34.165492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:34.165547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:34.165552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:34.165578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:34.165586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:34.165984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:34.165991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:34.166031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:34.166035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-05-05T09:41:34.166091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.166096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:34.166107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:34.166111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:34.166114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:34.166116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:34.166119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:34.166122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:34.166125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:34.166128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:34.166136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:34.166141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:34.166143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:34.166365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:34.166377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:34.273710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T09:41:34.273740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:34.274098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T09:41:34.274126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T09:41:34.274275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:34.274294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:34.274302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T09:41:34.274331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T09:41:34.274360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:34.276262Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T09:41:34.278702Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T09:41:34.279670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:34.279680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:41:34.279754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:34.279758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T09:41:34.279865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.279874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:26213 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 75C2F7B2-CF6C-446F-B7B1-C91E5CDF5AA9 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / FAKE_COORDINATOR: Erasing txId 102 61 2025-05-05T09:41:34.280112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:34.280127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:34.280132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T09:41:34.280138Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:41:34.280144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:34.280158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-05T09:41:34.280270Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:34.281298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:26213 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 11E5F093-7F65-4885-83FC-14D04E29708E amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T09:41:34.281757Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-05-05T09:41:34.281769Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T09:41:34.281787Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:26213 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F34C61BD-D3D9-4081-8E17-8EC1D4DEFF0E amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-05-05T09:41:34.282420Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-05-05T09:41:34.282428Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:34.283060Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:34.294350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:34.294378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:34.294410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:34.294424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:34.294440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:34.294445Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.294449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:34.294458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:34.294512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:34.295130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.295223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:34.295236Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:34.295248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:34.295253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:34.295258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-05-05T09:41:34.295261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:34.295267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:34.295282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T09:41:34.295289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:34.295295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:34.295299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:34.295321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:34.295703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:34.295712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 |80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test.py::test[pg-select_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_where-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] [GOOD] |80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] >> test.py::test[pg-pg_types_orderby--Results] >> test.py::test[pg-pg_types_orderby--Results] [SKIPPED] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test.py::test[insert-select_relabel-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_relabel-default.txt-Results] >> test.py::test[select-to_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-to_dict-default.txt-Results] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> test.py::test[sampling-map-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-map-dynamic-Results] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] >> test.py::test[distinct-distinct_one_count-default.txt-Results] [GOOD] >> test.py::test[dq-pool_trees_whitelist--Results] |80.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> 
test.py::test[pg-select_from_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-pool_trees_whitelist--Results] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous--Results] [SKIPPED] >> test.py::test[expr-constraints_of--Results] >> test.py::test[pg-select_where-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] >> TBackupTests::BackupUuidColumn[Zstd] >> test.py::test[window-current/session--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_python_stream--Results] [SKIPPED] >> test.py::test[in-in_sorted--Results] [GOOD] >> test.py::test[ql_filter-integer_many_right--Results] >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] |80.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Results] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_tz_date--Results] >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> test.py::test[insert-select_relabel-default.txt-Results] [GOOD] >> test.py::test[insert-unique_distinct_hints--ForceBlocks] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> test.py::test[window-mixed/aggregations--Results] [GOOD] >> test.py::test[window-rank/plain--Results] >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:36.517211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:36.517244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:36.517250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:36.517256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:36.517271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:36.517276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:36.517287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:36.517302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:36.517388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:36.517474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:36.531639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:36.531670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:36.535568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:36.535878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:36.535931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:36.537233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:36.537294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:36.537411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.537654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:36.547534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:36.548000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:36.548021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:36.548044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:36.548055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.548061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:36.548107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.550025Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:36.574651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:36.574756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.574838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:36.574900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:36.574936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.575955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.575990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:36.576062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.576075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:36.576080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:36.576086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:36.576609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.576622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:36.576628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:36.577014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.577026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.577033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:36.577041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.577754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:36.578178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:36.578225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:36.578442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.578471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-05-05T09:41:36.578478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:36.578555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:36.578563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:36.578600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:36.578613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:36.579308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:36.579318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.579368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:36.579374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:36.579449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.579456Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:36.579469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:36.579474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.579480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:36.579483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.579488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:36.579495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.579500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:36.579505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:36.579521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:36.579527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:36.579532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:36.579872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:36.579890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
de 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1FE18036-31DD-42AD-8CE8-157FE7482450 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-05-05T09:41:36.751341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:36.751351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:23648 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 2025-05-05T09:41:36.751355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3A8F8271-99B7-4CF2-A0FC-DC831E52BAA4 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== 2025-05-05T09:41:36.751389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T09:41:36.751409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-05T09:41:36.751435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-05T09:41:36.751470Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:478:2434], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-05-05T09:41:36.751475Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:478:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:36.751528Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:36.751852Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:477:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:23648 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA FAKE_COORDINATOR: Erasing txId 102 amz-sdk-invocation-id: CA15E2B4-733F-402F-9D00-27E4A6C56159 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-05-05T09:41:36.753171Z node 1 
:DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-05-05T09:41:36.753188Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2430] 2025-05-05T09:41:36.753218Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2431], sender# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T09:41:36.753290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:23648 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B9C08899-505B-4C46-9904-CF317680C2B9 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-05-05T09:41:36.753672Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-05-05T09:41:36.753676Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2431], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:36.754554Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:36.755313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:36.755326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:36.755340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:36.755350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:36.755358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.755382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.755442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:36.755449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-05-05T09:41:36.755460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:36.755468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:36.755473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.755477Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.755482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:36.755486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T09:41:36.755491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:36.755504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.755940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.756000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.756057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.756063Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:36.756071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:36.756074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.756077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:36.756079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.756082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:36.756090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2336] message: TxId: 102 2025-05-05T09:41:36.756094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.756098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:36.756103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:36.756122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:36.756415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:36.756423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:452:2411] TestWaitNotification: OK eventTxId 102 >> test.py::test[join-full_equal_not_null--Results] [GOOD] >> test.py::test[join-full_trivial--Results] >> test.py::test[select-to_dict-default.txt-Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:36.828652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:36.828681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:36.828687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:36.828692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:36.828704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:36.828709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:36.828718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:36.828734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:36.828822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:36.828896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-05-05T09:41:36.841885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:36.841915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:36.846109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:36.846440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:36.846491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:36.847984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:36.848048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:36.848156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.848396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:36.849141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:36.849473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:36.849486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:36.849506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:36.849513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.849519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:36.849556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.851183Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:36.865811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:36.865908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.865985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:36.866042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:36.866057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867042Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:36.867097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:36.867110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:36.867114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:36.867522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:36.867783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.867794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:36.867800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.868275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:36.868608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:36.868641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:36.868783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.868805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:36.868811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:36.868862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:36.868866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:36.868891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-05-05T09:41:36.868899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:36.869234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:36.869239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.869277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:36.869281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:36.869340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.869345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:36.869354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:36.869358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.869361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:36.869363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.869366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:36.869369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:36.869372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:36.869375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:36.869383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:36.869389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:36.869392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:36.869606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:36.869615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:36.982462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.982495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:36.982848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T09:41:36.982880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T09:41:36.983066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.983089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:36.983097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T09:41:36.983117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T09:41:36.983150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:36.985939Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T09:41:36.989700Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T09:41:36.990810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:36.990825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:11772 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C19ACA39-82CF-4FA1-8A5A-D5190281486B amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD 2025-05-05T09:41:36.990902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T09:41:36.990934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T09:41:36.991094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.991106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:36.991295Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:36.991885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:36.991906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:36.991912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T09:41:36.991919Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:41:36.991926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:36.991949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:11772 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CF5BE676-B54C-478F-A06B-2373EB8F7A49 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T09:41:36.992676Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-05-05T09:41:36.992696Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T09:41:36.992768Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T09:41:36.993213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11772 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 50D28B46-1639-44FA-A704-15B3597A49EC amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-05-05T09:41:36.994019Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-05-05T09:41:36.994036Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:36.994072Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:36.996012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T09:41:36.996049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:36.996078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T09:41:36.996092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T09:41:36.996105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:36.996110Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.996114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:36.996121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:36.996165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:36.996813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.996909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:36.996920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:36.996934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:36.996939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.996944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
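A note on the S3 export records above: each PUT request carries a content-md5 header (the base64 form of the body's MD5 digest), and the S3 mock answers with a PutObjectResult whose ETag is the same digest in hex, as is usual for single-part uploads. Below is a minimal Python sketch, using only the /metadata.json values already visible in this log, that checks the correspondence offline; the file mentioned in the final comment is illustrative only.

import base64
import binascii

# Values copied from the PUT /metadata.json request and its PutObjectResult above.
content_md5 = "5ZuHSMjV1bVKZhThhMGD5g=="       # Content-MD5 header (base64 of the MD5 digest)
etag_hex = "e59b8748c8d5d5b54a6614e184c183e6"  # ETag echoed by the S3 mock (hex of the same digest)

digest = binascii.unhexlify(etag_hex)          # the raw 16-byte MD5 digest
assert base64.b64encode(digest).decode("ascii") == content_md5

# For a downloaded copy of the object, the same digest could be recomputed with
# hashlib.md5(body).digest() and compared against either encoding.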
2025-05-05T09:41:36.996947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.996952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:36.996969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T09:41:36.996976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:36.996981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:36.996985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:36.997014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:36.997573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:36.997588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> test.py::test[schema-skip_complex_type2--ForceBlocks] >> test.py::test[pg-tpcds-q36-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q47-default.txt-Results] >> test.py::test[udf-regexp_udf--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-Results] >> test.py::test[blocks-group_by_complex_key--Results] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test.py::test[union_all-mix_map_and_project--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-Results] >> test.py::test[aggregate-group_by_rollup_key_check--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous--ForceBlocks] >> test.py::test[join-star_join_semionly-off-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[join-star_join_semionly-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-ForceBlocks] >> test.py::test[table_range-concat_with_view--Results] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> test.py::test[aggr_factory-max-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] >> test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-yql-10180-default.txt-Results] >> test.py::test[pg-tpcds-q78-default.txt-Results] >> 
test.py::test[blocks-date_add_interval_scalar--Results] [GOOD] >> TBackupTests::BackupUuidColumn[Raw] >> test.py::test[blocks-date_equals_scalar--Results] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> test.py::test[key_filter-tzdate--Results] [GOOD] >> test.py::test[lambda-lambda_udf--Results] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> test.py::test[ql_filter-integer_many_right--Results] [GOOD] >> test.py::test[sampling-map-keyfilter-Results] >> test.py::test[in-in_tuple_table-default.txt-Results] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_yql_type--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:39.249129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:39.249153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:39.249157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:39.249160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:39.249169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:39.249172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:39.249178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:39.249189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:39.249250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:39.249300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:39.261914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:39.261940Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:39.264755Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:39.264853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:39.264894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:39.269221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:39.269285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:39.269405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.269487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:39.271519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.272019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.272043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.272100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:39.272112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.272121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:39.272144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.274244Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T09:41:39.293743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:39.293834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.293899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:39.293950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:39.293962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.294670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.294694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:39.294744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-05-05T09:41:39.294752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:39.294756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:39.294760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:39.295169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.295178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:39.295182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:39.295616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.295647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.295654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.295662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.296283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:39.296697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:39.296726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:39.296894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.296919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:39.296930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.296994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:39.297002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.297027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:39.297039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-05-05T09:41:39.297470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.297479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.297528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.297533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:39.297581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.297586Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:39.297596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:39.297599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.297602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:39.297604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.297607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:39.297612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.297616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:39.297621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:39.297632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:39.297639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:39.297644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:39.297975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:39.297992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:39.407462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.407495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:39.407877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T09:41:39.407902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T09:41:39.408142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.408162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:39.408168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T09:41:39.408184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T09:41:39.408206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:39.410323Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2385], attempt# 0 2025-05-05T09:41:39.412914Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2385], sender# [1:413:2384] 2025-05-05T09:41:39.413768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.413779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:41:39.413842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.413845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T09:41:39.413978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.413985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T09:41:39.414083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:39.414091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:39.414094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T09:41:39.414098Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:41:39.414103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:39.414120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:27581 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01ABCC9B-CD62-4ED1-9356-B993462D3078 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T09:41:39.414434Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:39.415194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:27581 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 72602AA7-33D8-471F-992A-1B65122DB23F amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T09:41:39.415478Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-05-05T09:41:39.415488Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2384] 2025-05-05T09:41:39.415533Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2385], sender# [1:413:2384], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27581 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2E1B5BCA-913F-4269-80CA-8DBB0E3004A7 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-05-05T09:41:39.416198Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-05-05T09:41:39.416211Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2385], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:39.416258Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2384], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:39.428095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:39.428121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:39.428152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:39.428166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:39.428179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.428184Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.428189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:39.428196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:39.428246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.428856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.428945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.428956Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:39.428968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:39.428973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.428978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-05-05T09:41:39.428981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.428986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:39.429000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 102 2025-05-05T09:41:39.429007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.429011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:39.429016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:39.429044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:39.429686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:39.429711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:399:2371] TestWaitNotification: OK eventTxId 102 >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:39.739081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:39.739107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:39.739112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:39.739116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:39.739128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:39.739131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:39.739141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:39.739155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:39.739234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:39.739303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:39.750347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-05-05T09:41:39.750368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:39.753820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:39.754111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:39.754143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:39.755472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:39.755521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:39.755592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.755852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:39.757081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.757452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.757467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.757490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:39.757499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.757506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:39.757543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.759059Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:39.774188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:39.774274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.774338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:39.774390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:39.774402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.775138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.775161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-05-05T09:41:39.775214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.775222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:39.775226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:39.775230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:39.775581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.775589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:39.775592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:39.775995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.776004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.776009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.776016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.776574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:39.776982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:39.777020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:39.777175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.777201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:39.777208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.777262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:39.777267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.777289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:39.777298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:39.777713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.777722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.777762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.777767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:39.777830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.777837Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:39.777848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:39.777853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.777858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:39.777861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.777865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:39.777869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.777874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:39.777878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:39.777890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:39.777896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:39.777901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:39.778212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:39.778233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
HEMESHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:39.890890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.890935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:39.891347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T09:41:39.891383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T09:41:39.891532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.891551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:39.891557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T09:41:39.891570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T09:41:39.891597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:39.893290Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-05-05T09:41:39.896550Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T09:41:39.897450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.897460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:41:39.897529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.897535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T09:41:39.897630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.897638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:5607 
Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 30F0D201-8677-4EF0-9494-EDC4BF359A51 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T09:41:39.897817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:39.897829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:39.897838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T09:41:39.897842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:41:39.897850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:39.897864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-05-05T09:41:39.898166Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:39.898879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:5607 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01CB879A-C587-4F98-9560-B954206E68A8 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T09:41:39.899169Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-05-05T09:41:39.899186Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-05-05T09:41:39.899203Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:5607 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A81420F3-E409-4AC5-BA6B-0BB887C3CAE3 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-05-05T09:41:39.899614Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-05-05T09:41:39.899622Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:39.899648Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:39.911496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T09:41:39.911524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:39.911555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T09:41:39.911569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-05-05T09:41:39.911583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.911589Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.911594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:39.911601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:39.911668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.912216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.912261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.912270Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:39.912286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:39.912291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.912296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-05-05T09:41:39.912299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.912305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:39.912336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T09:41:39.912343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:39.912349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:39.912354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:39.912380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:39.912843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:39.912855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 >> test.py::test[pg-tpcds-q78-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> test.py::test[aggr_factory-min_by-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--ForceBlocks] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] >> test.py::test[schema-skip_complex_type2--ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type2--Results] >> test.py::test[select-trivial_between-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> test.py::test[join-bush_dis_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in--Results] >> test.py::test[expr-yql-10180-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> test.py::test[blocks-date_less_or_equal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Results] >> 
test.py::test[join-three_equalities_paren-off-ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] [SKIPPED] >> test.py::test[join-trivial_view--ForceBlocks] >> test.py::test[union_all-mix_map_and_project--Results] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[join-full_trivial--Results] [GOOD] >> test.py::test[join-inner_trivial-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial_from_concat-off-Results] [SKIPPED] >> test.py::test[join-join_comp_common_table-off-Results] [SKIPPED] >> test.py::test[join-join_comp_map_table--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:41.540727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:41.540758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:41.540764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:41.540770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:41.540783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:41.540788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:41.540799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:41.540814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:41.540905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:41.540985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:41.555174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:41.555200Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:41.563346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:41.563980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-05-05T09:41:41.564035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:41.565676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:41.565741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:41.565863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:41.566149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:41.567056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:41.567435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:41.567448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:41.567472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:41.567482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:41.567489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:41.567534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.569177Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:41.591700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:41.591799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.591874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:41.591932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:41.591943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.592823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:41.592861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:41.592927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.592938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:41.592944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:41.592950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:41.593455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.593468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:41.593474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:41.593815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.593824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.593831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:41.593839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:41.594545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:41.595045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:41.595096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:41.595317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:41.595347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:41.595356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:41.595438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:41.595446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:41.595484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:41.595499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:41.596002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-05-05T09:41:41.596011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:41.596063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:41.596069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:41.596150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.596158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:41.596172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:41.596180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:41.596186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:41.596190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:41.596195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:41.596201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:41.596208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:41.596212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:41.596227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:41.596234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:41.596239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:41.596609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:41.596628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
1.csv / / 11 2025-05-05T09:41:41.741294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:41.741373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:41:41.741488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:41.741496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-05-05T09:41:41.741644Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:41.742233Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:478:2434], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-05-05T09:41:41.742250Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:478:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:41.742286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.742298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:41.742573Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:477:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:19595 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 14F7043F-6668-4440-BE16-58510BC88E88 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-05-05T09:41:41.743893Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-05-05T09:41:41.744015Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2430] 2025-05-05T09:41:41.744041Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2431], sender# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-05-05T09:41:41.744092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:41.744109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:41.744115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T09:41:41.744127Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:41:41.744135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-05-05T09:41:41.744155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19595 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A8846A2C-2289-40D6-956C-96BBF62AD80E amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-05-05T09:41:41.745581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-05-05T09:41:41.745639Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2431], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-05-05T09:41:41.745648Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2431], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:41.745691Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2430], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:41.758601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:41.758630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:41.758665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:41.758681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:41.758697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:41.758746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:41.758983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:41.758995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-05-05T09:41:41.759012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:41.759024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:41:41.759031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:41.759037Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.759043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:41.759048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T09:41:41.759055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:41.759073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:41.760078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.760255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.760384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:41.760397Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:41.760414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:41.760420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:41.760426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:41.760429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:41.760435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:41.760455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2336] message: TxId: 102 2025-05-05T09:41:41.760463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:41.760470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:41.760478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:41.760507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:41.761050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:41.761064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:452:2411] TestWaitNotification: OK eventTxId 102 >> test.py::test[join-mapjoin_with_anonymous--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_anonymous--Results] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[window-current/aggregations--ForceBlocks] |80.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] >> test.py::test[schema-skip_complex_type2--Results] [GOOD] >> test.py::test[select-anon_clash--ForceBlocks] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-Results] >> test.py::test[lambda-lambda_udf--Results] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> test.py::test[select-trivial_between-default.txt-Results] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q47-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q66-default.txt-Results] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check--Results] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] >> test.py::test[blocks-if--Results] >> test.py::test[sampling-map-keyfilter-Results] [GOOD] >> test.py::test[sampling-read--Results] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[in-in_with_opt_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-append-with_view-Results] [SKIPPED] >> test.py::test[insert-append_view_fail--Results] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> 
test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] >> test.py::test[insert-unique_distinct_hints--ForceBlocks] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> test.py::test[insert-unique_distinct_hints--Results] >> TFlatTest::WriteMergeAndRead >> test.py::test[join-mapjoin_with_anonymous--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted--ForceBlocks] >> TFlatTest::Init >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[select-anon_clash--ForceBlocks] [GOOD] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-ForceBlocks] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> TFlatTest::SelectRangeReverse >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> test.py::test[table_range-each_with_non_existing_all_fail--Results] [GOOD] >> test.py::test[table_range-range_over_filter--Results] >> test.py::test[aggregate-avg_and_sum_float--ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by--Results] >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> test.py::test[pg-tpcds-q85-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] >> test.py::test[insert-append_view_fail--Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Results] |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TLocksTest::BrokenLockErase >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Results] >> test.py::test[join-trivial_view--ForceBlocks] [GOOD] >> test.py::test[join-trivial_view--Results] >> TFlatTest::WriteSplitAndRead [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> test.py::test[aggregate-group_by_rollup_key_check--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join--ForceBlocks] >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] [GOOD] >> 
test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] >> test.py::test[union_all-union_all_multiple-default.txt-Results] [GOOD] >> test.py::test[view-file_eval--Results] >> test.py::test[insert-unique_distinct_hints--Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--ForceBlocks] >> test.py::test[view-init_view_after_eval-default.txt-Results] [GOOD] >> test.py::test[view-standalone_view_lambda--Results] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-05-05T09:41:43.920618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894539373970784:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:43.920646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b24/r3tmp/tmpBt6HoB/pdisk_1.dat 2025-05-05T09:41:43.974086Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11199 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:44.054389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:44.054449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:44.055290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.055463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:44.065737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:44.142498Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:44.143464Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:44.147893Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:44.148748Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-05-05T09:41:44.155703Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.156022Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T09:41:44.156049Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T09:41:44.156498Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.156651Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2025-05-05T09:41:44.156796Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T09:41:44.156808Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-05-05T09:41:44.156840Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T09:41:44.156877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T09:41:44.157334Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.157559Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T09:41:44.157571Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T09:41:44.158155Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2025-05-05T09:41:44.158367Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T09:41:44.158377Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-05-05T09:41:44.158406Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.159667Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T09:41:44.159692Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438104173 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-05-05T09:41:44.162822Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.163457Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.163516Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.163753Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.163791Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.163857Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.164004Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.164033Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.164094Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T09:41:44.164222Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.164248Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.164306Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.164438Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.164462Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.164518Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T09:41:44.164642Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.164665Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.164716Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.164865Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.164889Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.164941Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T09:41:44.165067Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.165091Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.165142Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.165279Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.165299Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.165351Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 
2025-05-05T09:41:44.165471Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.165490Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.165539Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.165656Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.165679Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.165729Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T09:41:44.165844Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.165866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.165913Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.166038Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.166061Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.166111Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T09:41:44.166226Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.166258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:44.166308Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037888 restored its data 2025-05-05T09:41:44.166435Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.166464Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:44.166521Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 at 72075186224037889 restored its data 2025-05-05T09:41:44.166644Z node 1 :TX_DATASHARD DEBUG: tx 281474976715680 released its data 2025-05-05T09:41:44.166673Z node 1 :TX_DATASHAR ... 
4046644480, message: Source { RawX1: 7500894541898936883 RawX2: 4503608217307459 } TabletId: 72075186224037892 State: 4 2025-05-05T09:41:44.613806Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:44.613823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894541898936883 RawX2: 4503608217307459 } TabletId: 72075186224037892 State: 4 2025-05-05T09:41:44.613827Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T09:41:44.613855Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T09:41:44.613859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.613874Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-05T09:41:44.613874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.613881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.613881Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T09:41:44.614289Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:44.614303Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:44.614520Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:44.614533Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:44.614702Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:44.614715Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:44.614907Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:44.614961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:44.614969Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:44.614983Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T09:41:44.614993Z node 2 
:TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-05T09:41:44.615027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T09:41:44.615048Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T09:41:44.615080Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T09:41:44.615087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T09:41:44.615106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T09:41:44.615124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T09:41:44.615203Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T09:41:44.615306Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-05T09:41:44.615401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:41:44.615413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T09:41:44.615424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T09:41:44.615432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T09:41:44.615436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T09:41:44.615438Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:44.615472Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T09:41:44.615493Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-05-05T09:41:44.615686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894541898936540 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-05T09:41:44.615698Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:44.615725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894541898936881 RawX2: 4503608217307457 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:44.615733Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:44.615768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894541898936882 RawX2: 4503608217307458 } TabletId: 72075186224037891 State: 4 2025-05-05T09:41:44.615778Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:44.615807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.615809Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T09:41:44.615828Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:44.615828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.615835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.615834Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T09:41:44.616371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:44.616382Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T09:41:44.616387Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T09:41:44.616391Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T09:41:44.616429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:44.616472Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T09:41:44.616473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:44.616492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:44.616493Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T09:41:44.616510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:44.616532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:44.616565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:44.616570Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T09:41:44.616573Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:41:44.616573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:44.616575Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T09:41:44.616584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 1 2025-05-05T09:41:44.616647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:44.616656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:41:44.616662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:44.616665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:44.616737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:44.616747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:41:44.616757Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:44.616904Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:44.616920Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T09:41:44.617127Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T09:41:44.617139Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 >> test.py::test[pg-tpcds-q66-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q71-default.txt-Results] >> test.py::test[like-like_clause_escape-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] >> test.py::test[window-rank/plain--Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-05-05T09:41:44.268277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894543480147792:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:44.268386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b20/r3tmp/tmpfMzuJT/pdisk_1.dat 2025-05-05T09:41:44.315870Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9443 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:44.369699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:44.369721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:44.370779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:44.399713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:44.405283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:44.732296Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894542598720749:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:44.732319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b20/r3tmp/tmpebDAaR/pdisk_1.dat 2025-05-05T09:41:44.746119Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30661 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:44.836100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.836116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:44.836134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:44.837012Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:44.837254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:44.848472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> test.py::test[select-two_selects_with_diff_fields-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--ForceBlocks] >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce-with_premap-Results] [SKIPPED] >> test.py::test[sampling-subquery_default-default.txt-Results] >> TFlatTest::SelectRangeItemsLimit >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] >> test.py::test[join-trivial_view--Results] [GOOD] >> TFlatTest::SelectRangeItemsLimit [GOOD] >> test.py::test[join-trivial_view-off-ForceBlocks] >> TFlatTest::SelectRangeForbidNullArgs4 >> test.py::test[view-file_eval--Results] [GOOD] >> test.py::test[view-secure_eval_dyn--Results] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] >> test.py::test[blocks-date_less_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_not_equals--ForceBlocks] >> test.py::test[select-complex_filter_with_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Results] >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] >> test.py::test[join-mergejoin_force_one_sorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted--Results] >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> test.py::test[blocks-if--Results] [GOOD] >> test.py::test[blocks-json_document_type--Results] >> test.py::test[flatten_by-flatten_few_fields--Results] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] >> TLocksTest::Range_IncorrectDot1 >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] >> 
test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[window-current/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-current/aggregations--Results] >> TFlatTest::LargeProxyReply >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> test.py::test[join-bush_in_in--Results] [GOOD] >> test.py::test[join-convert_key--Results] >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-05-05T09:41:46.101504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894549851428451:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:46.101539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b18/r3tmp/tmpIRZfKX/pdisk_1.dat 2025-05-05T09:41:46.165824Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15057 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:46.240239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:46.240264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:46.241216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:46.241389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:46.260102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:46.565437Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894553782702955:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:46.565467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b18/r3tmp/tmpdgzsLN/pdisk_1.dat 2025-05-05T09:41:46.577759Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29532 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:46.668698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:46.668730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:46.669036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:46.669771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:46.680298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> test.py::test[blocks-date_equals_scalar--Results] [GOOD] >> test.py::test[blocks-date_less--Results] >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> test.py::test[view-secure_eval_dyn--Results] [GOOD] >> test.py::test[view-view_with_lambda--Results] >> test.py::test[select-complex_filter_with_order-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-ForceBlocks] >> TFlatTest::LargeDatashardReply [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> test.py::test[view-standalone_view_lambda--Results] [GOOD] >> test.py::test[weak_field-weak_field_data--Results] >> test.py::test[pg-tpcds-q71-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order--ForceBlocks] |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] >> TLocksTest::CK_GoodLock |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] >> test.py::test[join-join_comp_map_table--Results] [GOOD] >> test.py::test[join-join_key_cmp_udf--Results] >> test.py::test[sampling-subquery_default-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-05-05T09:41:44.094013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894545109107907:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:44.094156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b23/r3tmp/tmpI7Mtrs/pdisk_1.dat 2025-05-05T09:41:44.153703Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19178 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-05T09:41:44.176491Z node 1 :TX_PROXY DEBUG: actor# [1:7500894545109107954:2103] Handle TEvNavigate describe path dc-1 2025-05-05T09:41:44.176509Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108224:2251] HANDLE EvNavigateScheme dc-1 2025-05-05T09:41:44.176900Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108224:2251] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-05-05T09:41:44.185669Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108224:2251] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-05-05T09:41:44.187426Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108224:2251] Handle TEvDescribeSchemeResult Forward to# [1:7500894545109108223:2250] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:44.195140Z node 1 :TX_PROXY DEBUG: actor# [1:7500894545109107954:2103] Handle TEvProposeTransaction 2025-05-05T09:41:44.195158Z node 1 :TX_PROXY DEBUG: actor# [1:7500894545109107954:2103] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-05-05T09:41:44.228234Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:7500894545109107932:2099] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-05-05T09:41:44.228289Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-05-05T09:41:44.228322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:44.228325Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-05T09:41:44.228328Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T09:41:44.228330Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-05T09:41:44.228331Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T09:41:44.228344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:44.228400Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-05-05T09:41:44.228407Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-05T09:41:44.228419Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-05T09:41:44.228422Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-05T09:41:44.228425Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-05T09:41:44.229212Z node 1 :TX_PROXY DEBUG: actor# [1:7500894545109107954:2103] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-05-05T09:41:44.229272Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-05-05T09:41:44.229286Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:7500894545109107932:2099]) 2025-05-05T09:41:44.229309Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-05T09:41:44.229352Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-05-05T09:41:44.229357Z node 1 :TX_PROXY DEBUG: actor# [1:7500894545109107954:2103] TxId# 281474976715657 ProcessProposeTransaction 2025-05-05T09:41:44.229410Z node 1 :TX_PROXY DEBUG: actor# [1:7500894545109107954:2103] Cookie# 0 
userReqId# "" txid# 281474976715657 SEND to# [1:7500894545109108242:2261] 2025-05-05T09:41:44.229451Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-05-05T09:41:44.229460Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7500894545109107932:2099])::Execute 2025-05-05T09:41:44.229463Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T09:41:44.229472Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7500894545109107932:2099])::Complete 2025-05-05T09:41:44.229497Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 1746438104093449 ResourceMaximum { Memory: 202797641728 } 2025-05-05T09:41:44.229501Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-05-05T09:41:44.229505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:44.229549Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-05-05T09:41:44.229554Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-05-05T09:41:44.229555Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-05-05T09:41:44.229571Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-05-05T09:41:44.229573Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-05-05T09:41:44.229575Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-05-05T09:41:44.229577Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-05-05T09:41:44.235150Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-05-05T09:41:44.235174Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-05-05T09:41:44.241051Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-05-05T09:41:44.241080Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-05-05T09:41:44.241094Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-05-05T09:41:44.241247Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-05-05T09:41:44.241293Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-05-05T09:41:44.241309Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-05-05T09:41:44.241380Z node 1 :TX_PROXY DEBUG: Actor# [1:7500894545109108242:2261] txid# 281474976715657 HANDLE EvClientConnected 2025-05-05T09:41:44.242191Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:44.242256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.242349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-05-05T09:41:44.242396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T09:41:44.242417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2 ... sedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467461Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715674:0, datashard: 72075186224037899, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715674:0 129 -> 240 2025-05-05T09:41:44.467502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715674:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:41:44.467541Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715674 datashard 72075186224037899 state PreOffline 2025-05-05T09:41:44.467553Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2025-05-05T09:41:44.467625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-05-05T09:41:44.467655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715674:0 progress is 1/1 2025-05-05T09:41:44.467657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-05T09:41:44.467660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715674:0 progress is 1/1 2025-05-05T09:41:44.467661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-05T09:41:44.467664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715674, ready parts: 1/1, is published: true 2025-05-05T09:41:44.467672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7500894545109109326:2390] message: TxId: 
281474976715674 2025-05-05T09:41:44.467675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2025-05-05T09:41:44.467678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715674:0 2025-05-05T09:41:44.467680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715674:0 2025-05-05T09:41:44.467698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-05-05T09:41:44.467881Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:44.467899Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:44.468130Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:44.468305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894545109108821 RawX2: 4503603922340126 } TabletId: 72075186224037899 State: 4 2025-05-05T09:41:44.468335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:44.468467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:44.468469Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-05-05T09:41:44.468531Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-05-05T09:41:44.468546Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-05-05T09:41:44.468567Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-05-05T09:41:44.468594Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-05-05T09:41:44.468630Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-05-05T09:41:44.469293Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7500894545109107932:2099] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7500894545109108148:2200] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-05-05T09:41:44.469326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-05-05T09:41:44.469385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-05-05T09:41:44.469393Z node 1 
:TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-05-05T09:41:44.469398Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-05-05T09:41:44.469409Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-05-05T09:41:44.469416Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-05-05T09:41:44.469433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:44.469442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-05-05T09:41:44.469451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:44.469524Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-05-05T09:41:44.469562Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 2025-05-05T09:41:44.469596Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-05-05T09:41:44.469613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-05-05T09:41:44.469628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-05-05T09:41:44.469639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:44.469688Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-05-05T09:41:44.469698Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-05-05T09:41:44.470361Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-05-05T09:41:44.543151Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894544320143700:2139];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b23/r3tmp/tmpmHIjYq/pdisk_1.dat 2025-05-05T09:41:44.546356Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:44.553960Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8899 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:44.646143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:44.646173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:44.646556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:44.647246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:44.657399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.810754Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T09:41:47.819026Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T09:41:47.819843Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-05-05T09:41:47.828912Z node 2 :TX_PROXY ERROR: Actor# [2:7500894557205051864:5881] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-05-05T09:41:47.228833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894554347125633:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:47.228866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0e/r3tmp/tmpe7LNbs/pdisk_1.dat 2025-05-05T09:41:47.279712Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1239 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:47.311996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:47.317038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.330112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:47.330146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:47.331188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:47.685737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894555302137168:2059];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:47.685761Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0e/r3tmp/tmpQ35Z7B/pdisk_1.dat 2025-05-05T09:41:47.698682Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13271 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:47.791696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:47.791732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:47.792192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:47.793252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:47.795342Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.801188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::WriteSplitByPartialKeyAndRead >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda--Results] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Results] [SKIPPED] >> test.py::test[lineage-process-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--Results] >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[aggregate-group_by_ru_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Results] >> test.py::test[join-trivial_view-off-ForceBlocks] [GOOD] >> test.py::test[join-trivial_view-off-Results] >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TLocksTest::BrokenDupLock [GOOD] >> test.py::test[join-trivial_view-off-Results] [SKIPPED] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> test.py::test[window-current/aggregations--Results] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> TFlatTest::SelectBigRangePerf >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> TFlatTest::SelectRangeBytesLimit >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] Test command err: 2025-05-05T09:41:44.757488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894544178261447:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:44.757541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpxo2wHp/pdisk_1.dat 2025-05-05T09:41:44.811856Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24084 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:44.886312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:44.886352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:44.887119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:44.887345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:44.895463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:44.915823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:44.928060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:45.218707Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894546022122027:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:45.218810Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpWSFuXm/pdisk_1.dat 2025-05-05T09:41:45.236063Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17319 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:45.323041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:45.323081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:45.323455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:45.324120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:45.324864Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:45.329258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:45.344485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:45.358123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:45.638890Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894548639323569:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:45.638929Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpG3Htxt/pdisk_1.dat 2025-05-05T09:41:45.654719Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2349 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:45.743309Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:45.743350Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:45.743381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:45.744848Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:45.744935Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:45.748949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:45.767930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:45.778497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:46.056534Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894553853286067:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:46.056614Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpjdFYBB/pdisk_1.dat 2025-05-05T09:41:46.067149Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23031 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:46.159652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:46.159697Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:46.160551Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:46.160781Z node 4 :HIVE WARN: HIVE ... { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:47.637302Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:47.637347Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:47.637742Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:47.638494Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:47.641360Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.646653Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.662486Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.675098Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:48.032392Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894561958651263:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:48.032416Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpBAWxR3/pdisk_1.dat 2025-05-05T09:41:48.043741Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24013 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:48.136451Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:48.136500Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:48.136912Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.137594Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:48.138425Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.143822Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.158930Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.173130Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:48.518403Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894559090466118:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:48.520622Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpmYhDvl/pdisk_1.dat 2025-05-05T09:41:48.535192Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62447 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:48.622379Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:48.622417Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:48.622794Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.623535Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:48.624413Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.637818Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.649826Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.664336Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:49.023373Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894566864481574:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:49.023389Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b1c/r3tmp/tmpZWIMcn/pdisk_1.dat 2025-05-05T09:41:49.042272Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17992 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:49.131400Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.131434Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:49.131885Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:49.133336Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:49.143010Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.147133Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:49.173681Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.192007Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
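Another detail repeated once per freshly started node in the blocks above is the HIVE warning sequence VolatileState: Unknown -> Disconnected -> Connecting -> Connected. The sketch below merely encodes that observed startup order so it can be checked programmatically; the enum and helper are illustrative stand-ins, not YDB's actual Hive types.

    // Illustrative encoding of the node startup order seen in the HIVE warnings:
    // Unknown -> Disconnected -> Connecting -> Connected. Not YDB's real Hive types.
    #include <cassert>
    #include <vector>

    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    bool IsExpectedStartupOrder(const std::vector<EVolatileState>& seen) {
        const std::vector<EVolatileState> expected = {
            EVolatileState::Unknown,
            EVolatileState::Disconnected,
            EVolatileState::Connecting,
            EVolatileState::Connected,
        };
        return seen == expected;
    }

    int main() {
        assert(IsExpectedStartupOrder({EVolatileState::Unknown,
                                       EVolatileState::Disconnected,
                                       EVolatileState::Connecting,
                                       EVolatileState::Connected}));
        return 0;
    }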
>> test.py::test[pg-tpcds-q43-default.txt-Results] >> TFlatTest::SplitInvalidPath >> TObjectStorageListingTest::Listing >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] >> test.py::test[aggregate-group_by_cube_expr_trio--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] >> test.py::test[tpch-q2-default.txt-Results] >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> TLocksTest::Range_BrokenLock2 >> test.py::test[pg-tpcds-q74-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[tpch-q1-default.txt-ForceBlocks] >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-05-05T09:41:49.021658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894565386059299:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:49.021683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b09/r3tmp/tmpusJKzB/pdisk_1.dat 2025-05-05T09:41:49.084120Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25951 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:49.120534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.124046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.124080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-05-05T09:41:49.124928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:49.130159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1746438109192 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... (TRUNCATED) 2025-05-05T09:41:49.184941Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:49.185306Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T09:41:49.185322Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T09:41:49.197048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T09:41:49.197114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } waiting... 
2025-05-05T09:41:49.197183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:49.197200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:49.197237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T09:41:49.197291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-05-05T09:41:49.197297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T09:41:49.197522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-05T09:41:49.197535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-05-05T09:41:49.197569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-05-05T09:41:49.197575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-05-05T09:41:49.197639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:49.197654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:49.197724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-05-05T09:41:49.197752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 
from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T09:41:49.197765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2025-05-05T09:41:49.197768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-05-05T09:41:49.197771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T09:41:49.198010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710668, at schemeshard: 72057594046644480 2025-05-05T09:41:49.198018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710668, ready parts: 0/1, is published: true 2025-05-05T09:41:49.198022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710668, at schemeshard: 72057594046644480 2025-05-05T09:41:49.198646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-05T09:41:49.198659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-05-05T09:41:49.198682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-05T09:41:49.198686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-05-05T09:41:49.198690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-05-05T09:41:49.198739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T09:41:49.198741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T09:41:49.198747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T09:41:49.198749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-05-05T09:41:49.198753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T09:41:49.198758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710668:0 2 -> 3 2025-05-05T09:41:49.198861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, o ... 
vTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T09:41:49.850738Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T09:41:49.850749Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T09:41:49.851025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:49.851030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:41:49.851039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:49.851041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:41:49.851688Z node 3 :TX_DATASHARD INFO: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:49.851703Z node 3 :TX_DATASHARD INFO: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:49.852368Z node 3 :TX_DATASHARD DEBUG: 72075186224037890 parts [ [72075186224037888:1:31:1:12288:402:0] ] return ack processed 2025-05-05T09:41:49.852379Z node 3 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:49.852397Z node 3 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:49.852663Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037888:1:42:1:12288:227:0] ] return ack processed 2025-05-05T09:41:49.852669Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:49.852693Z node 3 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:49.853020Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7500894563037351021:2314], serverId# [3:7500894563037351025:2477], sessionId# [0:0:0] 2025-05-05T09:41:49.853024Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7500894563037351023:2315], serverId# [3:7500894563037351026:2478], sessionId# [0:0:0] 2025-05-05T09:41:49.853185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894563037350559 RawX2: 4503612512274679 } TabletId: 72075186224037888 State: 4 2025-05-05T09:41:49.853203Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:49.853244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894563037350559 RawX2: 4503612512274679 } TabletId: 72075186224037888 State: 4 2025-05-05T09:41:49.853253Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:49.853705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:49.853738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:49.853694Z node 3 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:49.853728Z node 3 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:49.853847Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T09:41:49.853852Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T09:41:49.854203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894563037350788 RawX2: 4503612512274685 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:49.854216Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:49.854252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894563037350787 RawX2: 4503612512274684 } TabletId: 72075186224037892 State: 4 2025-05-05T09:41:49.854255Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:49.854649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:49.854675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:49.854785Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:49.854796Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-05-05T09:41:49.855431Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T09:41:49.855467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:49.855498Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-05T09:41:49.855502Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T09:41:49.855504Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:41:49.855528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:49.855572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:49.855586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:49.855602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:49.855616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T09:41:49.855630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:49.855646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:49.855647Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T09:41:49.855648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:49.855657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:49.855823Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:49.856207Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-05T09:41:49.856220Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-05T09:41:49.856223Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-05T09:41:49.856314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:41:49.856332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T09:41:49.856362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:41:49.856372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:49.856374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:49.856377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T09:41:49.856381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T09:41:49.856387Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:49.856487Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T09:41:49.856500Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7500894566817100993:2711], serverId# [3:7500894563037350647:2173], sessionId# [0:0:0] 2025-05-05T09:41:49.856505Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-05T09:41:49.856510Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7500894566817101511:3123], serverId# [3:7500894563037350993:2450], sessionId# [0:0:0] 2025-05-05T09:41:49.856514Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T09:41:49.856517Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500894566817101509:3121], serverId# 
[3:7500894563037350994:2451], sessionId# [0:0:0] 2025-05-05T09:41:49.856533Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T09:41:49.856797Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:49.856942Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T09:41:49.857403Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T09:41:49.857428Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T09:41:49.857454Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-05T09:41:49.857467Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-05T09:41:49.857470Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-05T09:41:49.857810Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T09:41:49.857835Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-05-05T09:41:49.858032Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:49.858046Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> test.py::test[join-convert_key--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval-off-Results] [SKIPPED] >> TFlatTest::LargeProxyReplyRW [GOOD] >> TObjectStorageListingTest::MaxKeysAndSharding >> TFlatTest::SelectRangeBothLimit [GOOD] >> test.py::test[select-exists_false-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] >> test.py::test[weak_field-weak_field_data--Results] [GOOD] >> test.py::test[weak_field-weak_field_join--Results] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> TFlatTest::SplitThenMerge [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-05-05T09:41:49.958042Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894566847660678:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:49.958439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b03/r3tmp/tmpH8IDsY/pdisk_1.dat 
2025-05-05T09:41:50.026023Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1891 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:50.100507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.100545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:50.101318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.101505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:50.116420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.445533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894570165035428:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.445566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b03/r3tmp/tmp6OqFsM/pdisk_1.dat 2025-05-05T09:41:50.459963Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10860 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:50.550591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.550633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.550940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.551544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:50.552802Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.569559Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.577021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> test.py::test[aggregate-compare_by--Results] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[join-equi_join_three_asterisk_eval-off-Results] [SKIPPED] >> test.py::test[sampling-subquery_expr-default.txt-Results] [GOOD] >> test.py::test[schema-limit_simple--Results] >> TLocksTest::NoLocksSet >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--Results] [GOOD] >> test.py::test[join-bush_dis_in_in--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-05-05T09:41:47.162324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894556032619098:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:47.162489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b11/r3tmp/tmpSlINrQ/pdisk_1.dat 2025-05-05T09:41:47.238435Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17112 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:47.303769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:47.303803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:47.304502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:47.306073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:47.309615Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.316893Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:47.317990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.002538Z node 1 :TX_PROXY ERROR: Actor# [1:7500894560327589947:4122] txid# 281474976716010 MergeResult Result too large TDataReq marker# P18 2025-05-05T09:41:49.002591Z node 1 :TX_PROXY ERROR: Actor# [1:7500894560327589947:4122] txid# 281474976716010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-05-05T09:41:49.271234Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894565548659336:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b11/r3tmp/tmpeujJV6/pdisk_1.dat 2025-05-05T09:41:49.274455Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:49.282589Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28325 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:49.374531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:49.374607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.374634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:49.375825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:49.381521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.942352Z node 2 :TX_PROXY ERROR: Actor# [2:7500894569843630003:4127] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-05-05T09:41:50.942375Z node 2 :TX_PROXY ERROR: Actor# [2:7500894569843630003:4127] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-05-05T09:41:49.807310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894563897913750:2137];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:49.808215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b05/r3tmp/tmp2PpshJ/pdisk_1.dat 2025-05-05T09:41:49.879266Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2114 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:49.905837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.905872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:49.906957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:49.953286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:49.956007Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.974416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... insert finished 1700 usec 2686 usec 1636 usec 1669 usec 1557 usec 2152 usec 1931 usec 1622 usec 1847 usec 1606 usec 2025-05-05T09:41:50.493928Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894569890054251:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.493954Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b05/r3tmp/tmpzwwXNr/pdisk_1.dat 2025-05-05T09:41:50.503031Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8834 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:50.597552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.597583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.597933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.598801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T09:41:50.631837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TLocksTest::Range_IncorrectDot2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-05-05T09:41:50.365608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894567023458105:2264];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.365644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000afb/r3tmp/tmpdULEpv/pdisk_1.dat 2025-05-05T09:41:50.435304Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21186 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:50.471083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.473579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:50.492342Z node 1 :FLAT_TX_SCHEMESHARD WARN: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825, tableStr: /dc-1/Dir1, tableId: , opId: 281474976710659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825 2025-05-05T09:41:50.492932Z node 1 :TX_PROXY ERROR: Actor# [1:7500894567023458472:2293] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp:825" severity: 1 } 2025-05-05T09:41:50.509791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.509822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.510729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:50.820736Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894569336930392:2266];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.820791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000afb/r3tmp/tmpkYaDtO/pdisk_1.dat 2025-05-05T09:41:50.839024Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25901 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:50.924269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.924305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-05-05T09:41:50.924887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.925391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:50.932015Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.946415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.019270Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:51.019961Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:51.027357Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:51.027436Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438111047 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-05-05T09:41:51.044075Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:51.044687Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.044758Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:51.045005Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.045051Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:51.045122Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:51.045280Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.045450Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:51.045522Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:51.045665Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.045697Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:51.045760Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:51.045897Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.046031Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:51.046099Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:51.046242Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.046270Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:51.046330Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:51.046469Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.046627Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:51.046766Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:51.046999Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.047048Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:51.047126Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:51.047272Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.047309Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:51.047373Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:51.047507Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.047540Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:51.047599Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:51.047743Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:51.047775Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:51.047837Z node 2 :TX_DAT ... 
80 2025-05-05T09:41:51.161665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7500894569336930796 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T09:41:51.161676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-05-05T09:41:51.161709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894573631898713 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T09:41:51.161716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715693, tablet: 72075186224037894, partId: 0 2025-05-05T09:41:51.161726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894573631898713 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T09:41:51.161733Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-05-05T09:41:51.161739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7500894573631898713 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-05-05T09:41:51.161742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161744Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161752Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715693:0 129 -> 240 2025-05-05T09:41:51.161789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161861Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-05-05T09:41:51.161866Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-05-05T09:41:51.161867Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:41:51.161894Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-05-05T09:41:51.161905Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-05-05T09:41:51.161981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T09:41:51.162019Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-05-05T09:41:51.162027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-05T09:41:51.162030Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-05-05T09:41:51.162032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-05T09:41:51.162035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-05-05T09:41:51.162046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500894573631898936:2427] message: TxId: 281474976715693 2025-05-05T09:41:51.162054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-05-05T09:41:51.162057Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715693:0 2025-05-05T09:41:51.162060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715693:0 2025-05-05T09:41:51.162082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T09:41:51.162748Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:51.162775Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T09:41:51.163045Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:51.163063Z node 2 :TX_DATASHARD INFO: 
72075186224037894 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:51.163272Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:51.163300Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:51.163349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894569336930796 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-05T09:41:51.163371Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:51.163423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894573631898713 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-05-05T09:41:51.163433Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:51.163473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:51.163476Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T09:41:51.163493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:51.163493Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-05-05T09:41:51.164360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:51.164407Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T09:41:51.164416Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-05-05T09:41:51.164442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:51.164510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T09:41:51.164518Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T09:41:51.164533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:51.164546Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T09:41:51.164554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:51.164556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:51.164568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:51.164778Z node 2 :HIVE WARN: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T09:41:51.164781Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-05T09:41:51.164837Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-05-05T09:41:51.164845Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2025-05-05T09:41:51.164873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:51.164878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:41:51.164889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T09:41:51.164891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-05T09:41:51.164900Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [GOOD] >> TLocksFatTest::RangeSetRemove >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> TFlatTest::CrossRW >> test.py::test[select-exists_false-default.txt-Results] [GOOD] >> test.py::test[select-refselect--ForceBlocks] |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-05-05T09:41:47.047630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894556868602275:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:47.047699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmpT07vXD/pdisk_1.dat 2025-05-05T09:41:47.116942Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16528 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:47.185593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:47.185628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:47.186504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:47.186623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:47.191677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.197403Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:47.198440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.263981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.276457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.534585Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894556016752273:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:47.534674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmpjeWeDB/pdisk_1.dat 2025-05-05T09:41:47.558762Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6237 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:47.639642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:47.639674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:47.639987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:47.640730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:47.643578Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.653426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.668713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:47.682404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.012317Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894559995371225:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:48.012345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmpykNkrc/pdisk_1.dat 2025-05-05T09:41:48.023871Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22236 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:48.115816Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:48.115853Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:48.116240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.116841Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:48.122199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.138345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.153657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.490632Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894558612562334:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:48.490690Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmpcPAC6g/pdisk_1.dat 2025-05-05T09:41:48.505629Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23252 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:48.594515Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:48.594552Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:48.594904Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subope ... 
{ GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:50.090125Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.090163Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.090525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.091171Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:50.092905Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.103495Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.118616Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.133095Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.494030Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894570181651619:2232];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.494140Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmpDuJgRP/pdisk_1.dat 2025-05-05T09:41:50.507579Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29238 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:50.598579Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.598630Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.599063Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.600419Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:50.601451Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.630755Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.649960Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.664269Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.998754Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894569987859597:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.998784Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmp09pz8X/pdisk_1.dat 2025-05-05T09:41:51.015342Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12485 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.098739Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.098772Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.099156Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.099773Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:51.102821Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:51.112092Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.128891Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.140565Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.497804Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894574263091053:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:51.497838Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b17/r3tmp/tmpZVnbZz/pdisk_1.dat 2025-05-05T09:41:51.513409Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20570 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.598439Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.598495Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.598810Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.600109Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:51.603648Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.608601Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.623139Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.639846Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
|81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:37.224413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:37.224435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:37.224439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:37.224443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:37.224453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:37.224455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:37.224462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:37.224474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:37.224541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:37.224600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:37.233878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:37.233901Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:37.236943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:37.237293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:37.237333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:37.238341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:37.238385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:37.238464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:37.238656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-05-05T09:41:37.239365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:37.239683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:37.239692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:37.239707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:37.239712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:37.239717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:37.239743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.240967Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:37.254773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:37.254878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.254994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:37.255058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:37.255072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.256203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:37.256238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:37.256298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.256308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:37.256311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:37.256315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:37.257017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.257032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:37.257037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-05-05T09:41:37.257543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.257556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:37.257562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:37.257570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:37.258227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:37.258684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:37.258714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:37.258850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:37.258870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:37.258878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:37.258975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:37.258984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:37.259009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:37.259018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:37.259475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:37.259484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:37.259526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:37.259531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:37.259597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-05T09:41:37.259604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:37.259614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:37.259618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:37.259623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:37.259626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:37.259631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:37.259636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:37.259641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:37.259645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:37.259657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:37.259663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:37.259667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:37.259962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:37.259973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
ffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 430CA1E0-E149-4E85-99D8-739DD84492E6 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-05-05T09:41:52.013451Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-05-05T09:41:52.013504Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T09:41:52.013525Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 069F962C-0558-443D-B95D-D6C49D1FDDE1 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-05-05T09:41:52.013995Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-05-05T09:41:52.014036Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T09:41:52.014046Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CF60BFFD-2D14-4190-A8DB-FADF0E7E9DC9 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-05-05T09:41:52.014485Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-05-05T09:41:52.014493Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3453:5417], success# 1, error# , multipart# 1, uploadId# 1 2025-05-05T09:41:52.017942Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3453:5417], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:29019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
4AB3D596-28C4-4008-8C1A-86CD27007683 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-05-05T09:41:52.020151Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3453:5417], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-05-05T09:41:52.020220Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:52.022390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:52.022405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:52.022436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:52.022446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:52.022456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:52.022459Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.022462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:52.022466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:52.022537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:52.023855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.023972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.023982Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:52.023996Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:52.024001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.024006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:52.024010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.024015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:52.024048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T09:41:52.024053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.024058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:52.024061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:52.024093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:52.024835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:52.024845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3438:5403] TestWaitNotification: OK eventTxId 102 >> test.py::test[join-mergejoin_with_reverse_key_order--Results] [GOOD] >> test.py::test[join-order_of_qualified-off-ForceBlocks] >> TFlatTest::SplitEmptyToMany >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Results] >> TFlatTest::GetTabletCounters [GOOD] >> TLocksTest::BrokenSameKeyLock >> test.py::test[join-join_key_cmp_udf--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-full/session_compact--Results] >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TFlatTest::CopyTableAndCompareColumnsSchema >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-ForceBlocks] >> TFlatTest::PathSorting |81.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] >> TLocksTest::Range_BrokenLockMax >> TLocksTest::CK_BrokenLock [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> test.py::test[blocks-date_not_equals--ForceBlocks] [GOOD] >> test.py::test[blocks-date_not_equals--Results] >> TLocksTest::CK_Range_BrokenLock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] 
recipient: [1:108:2140] 2025-05-05T09:41:38.333080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:38.333108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:38.333114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:38.333120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:38.333132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:38.333136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:38.333146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:38.333159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:38.333239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:38.333304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:38.342585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:38.342605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:38.345558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:38.345850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:38.345896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:38.347250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:38.347308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:38.347402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:38.347613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:38.348477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:38.348753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:38.348762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:38.348778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:38.348782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:38.348787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-05-05T09:41:38.348814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.350016Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:38.362948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:38.363045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.363103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:38.363143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:38.363151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:38.364204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:38.364216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:38.364220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:38.364617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:38.364889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.364899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:38.364904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:38.365339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:38.365637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:38.365668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:38.365823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:38.365840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:38.365847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:38.365895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:38.365900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:38.365922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:38.365931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:38.366326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:38.366332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:38.366367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:38.366371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:38.366436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:38.366442Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:38.366451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:38.366454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:38.366457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:38.366459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:38.366462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:38.366465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-05-05T09:41:38.366468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:38.366471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:38.366480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:38.366484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:38.366487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:38.366701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:38.366712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:52.942294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.942376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:52.943446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-05-05T09:41:52.943493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-05-05T09:41:52.943725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:52.943789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:52.943800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-05-05T09:41:52.943880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-05-05T09:41:52.943920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:52.947241Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3453:5417], attempt# 0 2025-05-05T09:41:52.950892Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3453:5417], sender# [1:3452:5416] FAKE_COORDINATOR: advance: minStep5000003 
State->FrontStep: 5000003 2025-05-05T09:41:52.953878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:52.953895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:41:52.953973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:52.953979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-05-05T09:41:52.954245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.954259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-05-05T09:41:52.954519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:52.954534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-05-05T09:41:52.954540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-05-05T09:41:52.954547Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:41:52.954554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-05-05T09:41:52.954594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:24042 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A5C69282-14F2-4D62-9E5A-2A85C60ADDC3 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-05-05T09:41:52.956015Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3453:5417], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-05-05T09:41:52.957374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:24042 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DB11E336-31CA-40D1-B94D-9F57BC4B344F amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-05-05T09:41:52.959417Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3453:5417], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-05-05T09:41:52.959494Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T09:41:52.959785Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:24042 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5D3410D1-7C69-49DE-94F2-8116C4A49B8A amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-05-05T09:41:52.960959Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3453:5417], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-05-05T09:41:52.960980Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3453:5417], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-05-05T09:41:52.961067Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:52.974209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:52.974243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:52.974282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:52.974298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:52.974314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:52.974319Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.974325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:52.974334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 
2025-05-05T09:41:52.974405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:52.975776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.975963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:52.975978Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:52.975997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:52.976003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.976009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:52.976013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.976020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:52.976046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T09:41:52.976055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:52.976062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:52.976068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:52.976104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:52.977157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:52.977176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3438:5403] TestWaitNotification: OK eventTxId 102 >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TFlatTest::SelectRangeNullArgs3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-05-05T09:41:52.362363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894575719177786:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.362559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aea/r3tmp/tmpYRfxLb/pdisk_1.dat 2025-05-05T09:41:52.432499Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6312 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.465461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.465643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.465660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:52.467192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:52.468354Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.476802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.799329Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894579256874726:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.799381Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aea/r3tmp/tmphkPzVI/pdisk_1.dat 2025-05-05T09:41:52.821037Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9031 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:52.902635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.902675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.903421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.903735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:52.912039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438112972 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) |81.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-05-05T09:41:48.444486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894562380538046:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:48.444929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpDjo0EG/pdisk_1.dat 2025-05-05T09:41:48.513340Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61639 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:48.546603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:48.546629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:48.547705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:48.588215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.593946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.598120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.662515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.671787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:48.897085Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894561046878141:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:48.897133Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpSVT25s/pdisk_1.dat 2025-05-05T09:41:48.908925Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29750 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:49.001550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.001581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:49.001981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:49.003262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:49.007198Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.018307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.033000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.052940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.383542Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894566139309356:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:49.383576Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpllaarg/pdisk_1.dat 2025-05-05T09:41:49.399412Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14707 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:49.487703Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.487741Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:49.488207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:49.488724Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:49.491148Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.501765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.516515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.530217Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:49.890301Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894564974831066:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:49.890319Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpeAIj1M/pdisk_1.dat 2025-05-05T09:41:49.903330Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11947 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:49.992129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:49.992177Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:49.992550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.540307Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.540347Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.540704Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.541357Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:51.546309Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.553476Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.567648Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.581940Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:51.940654Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894572963061927:2067];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpXXQ5kq/pdisk_1.dat 2025-05-05T09:41:51.949414Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:51.953237Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31690 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.044111Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.044164Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.044546Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.045116Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:52.045989Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.048985Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.064792Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.124857Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:52.452837Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894576230609947:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.453032Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpiWeArH/pdisk_1.dat 2025-05-05T09:41:52.470808Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26560 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.557084Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.557114Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.557550Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.558346Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:52.567843Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:52.583315Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.595928Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:52.967456Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894579097575220:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.967514Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b0c/r3tmp/tmpUvjVII/pdisk_1.dat 2025-05-05T09:41:52.979935Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61568 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:53.072307Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.072345Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.072715Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.073275Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:53.074491Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.079767Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.093469Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.107236Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> TFlatTest::RejectByPerShardReadSize >> TFlatTest::AutoSplitBySize |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [GOOD] >> test.py::test[tpch-q1-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] >> TLocksTest::Range_GoodLock0 >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> TFlatTest::PartBloomFilter [GOOD] >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] >> TFlatTest::SelectRangeNullArgs4 [GOOD] |81.3%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-05-05T09:41:53.409501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894580257865357:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.409651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ada/r3tmp/tmpBu795U/pdisk_1.dat 2025-05-05T09:41:53.480872Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29979 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:53.511114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.511141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.512145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:53.551285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.563036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.575179Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1746438113602 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1746438113658 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... (TRUNCATED) 2025-05-05T09:41:53.855543Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894580528254071:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.855564Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ada/r3tmp/tmpVQizG1/pdisk_1.dat 2025-05-05T09:41:53.869548Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17036 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:53.960271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.960298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.960632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.962158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:53.967560Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:53.972749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.114727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::SplitEmptyAndWrite |81.3%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:41:39.172547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:39.172569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:39.172573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:39.172577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:39.172587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:39.172589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:39.172595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:39.172607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:39.172672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:39.172726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-05-05T09:41:39.182015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:41:39.182038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:39.185289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:39.185577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:39.185619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:39.186740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:39.186782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:39.186881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.187133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:39.187825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.188076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.188084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.188098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:39.188103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.188107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:39.188154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.189195Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:41:39.202894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:39.203019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.203099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:39.203157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:39.203172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.206800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.206854Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:39.206982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.207000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:39.207006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:39.207012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:39.209777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.209808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:39.209817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:39.210452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.210469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.210477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.210485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.211354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:39.212020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:39.212074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:39.212297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:39.212333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:39.212343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.212425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:39.212436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:39.212476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-05-05T09:41:39.212490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:39.213074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:39.213088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:39.213142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:39.213149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:39.213230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:39.213240Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:41:39.213253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:39.213258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.213264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:41:39.213267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.213272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:41:39.213277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:41:39.213282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:41:39.213286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:41:39.213300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:41:39.213306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:41:39.213311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:41:39.213688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:41:39.213707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
ksum: } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B2E57670-ACAC-4CED-B107-6DA067FC0A1F amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-05-05T09:41:54.083690Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-05-05T09:41:54.083732Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T09:41:54.083771Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F803DA53-8E33-42BA-A8BD-81ABF2DB5A80 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-05-05T09:41:54.084402Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-05-05T09:41:54.084440Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3452:5416] 2025-05-05T09:41:54.084451Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3453:5417], sender# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 52279776-B26F-4F79-BBAD-29018C8C4341 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-05-05T09:41:54.087659Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3453:5417], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-05-05T09:41:54.087679Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3453:5417], success# 1, error# , multipart# 1, uploadId# 1 2025-05-05T09:41:54.091590Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3453:5417], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
51802CF3-6C5E-4B09-A0EA-27C9B6B7E500 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-05-05T09:41:54.100183Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3453:5417], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-05-05T09:41:54.100290Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3452:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-05-05T09:41:54.103777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:54.103805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-05-05T09:41:54.103853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:54.103868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-05-05T09:41:54.103882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:54.103887Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:54.103892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-05-05T09:41:54.103899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-05-05T09:41:54.103991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:54.105136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:54.105299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-05-05T09:41:54.105313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-05-05T09:41:54.105328Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:54.105333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:54.105339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-05-05T09:41:54.105342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:54.105351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-05-05T09:41:54.105387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-05-05T09:41:54.105395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-05-05T09:41:54.105401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-05-05T09:41:54.105406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-05-05T09:41:54.105446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:41:54.106285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:41:54.106299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3438:5403] TestWaitNotification: OK eventTxId 102 >> TLocksFatTest::ShardLocks [GOOD] >> TLocksTest::MultipleLocks [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> TFlatTest::LargeDatashardReplyDistributed >> test.py::test[join-bush_dis_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in--Results] >> TFlatTest::WriteSplitKillRead >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> test.py::test[select-refselect--ForceBlocks] [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-05-05T09:41:53.891133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894581266212938:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.891190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aba/r3tmp/tmpsSdkYK/pdisk_1.dat 2025-05-05T09:41:53.955814Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18488 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:53.991122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.993796Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.002582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.027542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.027574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.028747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.361522Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894587431723486:2153];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.361597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aba/r3tmp/tmp8TtpPE/pdisk_1.dat 2025-05-05T09:41:54.379448Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16847 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:54.466471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.466504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.467435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.467594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.475382Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.485693Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.487245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> test.py::test[select-refselect--Results] >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> TFlatTest::Mix_DML_DDL >> test.py::test[pg-tpcds-q82-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-Results] |81.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::ReadOnlyMode >> TLocksTest::Range_BrokenLock3 [GOOD] >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TFlatTest::SplitBoundaryRead [GOOD] >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] >> TFlatTest::CopyCopiedTableAndRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-05-05T09:41:52.375595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894577114987873:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.375614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ae8/r3tmp/tmpAcwdpz/pdisk_1.dat 2025-05-05T09:41:52.429320Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18359 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.502496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.502523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.503256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.503384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:52.512309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.575634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.587071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.256839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894580291580909:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.256899Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ae8/r3tmp/tmpH6wwnN/pdisk_1.dat 2025-05-05T09:41:53.271149Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20675 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:53.367287Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.367318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.367654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.368117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:53.368923Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:53.380850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.402628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:53.415960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.157546Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894588297208260:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.165117Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ae8/r3tmp/tmpeLfLhI/pdisk_1.dat 2025-05-05T09:41:54.180599Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22857 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.269429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.269461Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.269810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.273346Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.279439Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.285395Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.286439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:54.304145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.322119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.679304Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894585989387859:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.679378Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ae8/r3tmp/tmpchazXJ/pdisk_1.dat 2025-05-05T09:41:54.694328Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13831 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.783082Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.783122Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.783500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.784409Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.793122Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.801699Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.803288Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.818429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.830123Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> test.py::test[join-order_of_qualified-off-ForceBlocks] [GOOD] >> test.py::test[join-order_of_qualified-off-Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] [GOOD] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-05-05T09:41:51.809348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894572086345032:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:51.809380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmpeSHNiy/pdisk_1.dat 2025-05-05T09:41:51.871611Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9635 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.951302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.951334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.952004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.952812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:51.973470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.004093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.015419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:52.285853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894578099845618:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.285876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmp38aRWg/pdisk_1.dat 2025-05-05T09:41:52.299830Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29516 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.391961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.392002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.392451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.393066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:52.397101Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.407149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.427862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:41:52.447879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.771544Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894577325075350:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.771908Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmpEZsf8r/pdisk_1.dat 2025-05-05T09:41:52.789967Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18706 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.820053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.821730Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.833539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:52.871597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.871633Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.872774Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:52.889693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:41:52.898930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.230006Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894582768153721:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.230040Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmpTzFFzj/pdisk_1.dat 2025-05-05T09:41:53.246806Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18457 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:53.334503Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.334532Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.334953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.335527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:53.339089Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.344466Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.360164Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.373877Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:53.738927Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7500894581083618636:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.739643Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmp9oGrNQ/pdisk_1.dat 2025-05-05T09:41:53.758459Z node 5 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27029 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... 2025-05-05T09:41:53.842933Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.842978Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.843276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.843987Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:53.844712Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.848058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.864817Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.878825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmpImBxh5/pdisk_1.dat 2025-05-05T09:41:54.234132Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:54.243251Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6358 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.318265Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.318312Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.318686Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.319250Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.329813Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.341460Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.343994Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:54.355369Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:54.367256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aed/r3tmp/tmpGll0uX/pdisk_1.dat 2025-05-05T09:41:54.709080Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500894584965863503:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.709887Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:54.726354Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17207 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.812615Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.812651Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.813097Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.814871Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.819301Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.831398Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.839974Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.866749Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.881439Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
|81.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> test.py::test[join-order_of_qualified-off-Results] [SKIPPED] >> test.py::test[join-premap_map_cross-off-ForceBlocks] >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-ForceBlocks] >> TFlatTest::WriteSplitWriteSplit [GOOD] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> TFlatTest::MiniKQLRanges >> test.py::test[select-refselect--Results] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-ForceBlocks] >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> test.py::test[blocks-sort_two_mix--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] >> test.py::test[aggregate-group_by_full_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-05-05T09:41:54.953592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894584391096466:2274];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.953845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aa9/r3tmp/tmp1J32jM/pdisk_1.dat 2025-05-05T09:41:55.034316Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8641 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-05T09:41:55.053151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.053181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.054253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:55.099517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.103065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:55.110375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.139551Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:55.143126Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:55.145245Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:55.146276Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438115170 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-05-05T09:41:55.171921Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.171987Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.172027Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.172051Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.172265Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-05-05T09:41:55.174341Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.32, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438115170 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T09:41:55.206080Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-05-05T09:41:55.206092Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-05-05T09:41:55.206094Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-05-05T09:41:55.206136Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-05-05T09:41:55.206140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-05-05T09:41:55.458955Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894592116381677:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aa9/r3tmp/tmpjHnaQU/pdisk_1.dat 2025-05-05T09:41:55.466131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:55.472207Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32524 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.563258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.563293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.563634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.564162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:55.571302Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:55.577414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.604311Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:55.606922Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:55.611202Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:55.612096Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld 2025-05-05T09:41:55.619268Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDes ... ode 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.639077Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.639096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-05-05T09:41:55.639135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-05-05T09:41:55.639207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-05-05T09:41:55.639255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-05-05T09:41:55.639278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037890 2025-05-05T09:41:55.639284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2025-05-05T09:41:55.647804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-05-05T09:41:55.647827Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-05-05T09:41:55.647910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, 
operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-05-05T09:41:55.647915Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-05-05T09:41:55.647919Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 3 -> 131 2025-05-05T09:41:55.647967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.647988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.647998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.648001Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:41:55.648006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-05-05T09:41:55.648081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-05-05T09:41:55.648114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-05-05T09:41:55.648702Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.648749Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.648764Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.648786Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.648807Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-05-05T09:41:55.650624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-05T09:41:55.650641Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-05T09:41:55.650746Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 131 -> 132 2025-05-05T09:41:55.650776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T09:41:55.650903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, 
at schemeshard: 72057594046644480 2025-05-05T09:41:55.650962Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T09:41:55.650971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-05-05T09:41:55.651024Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T09:41:55.651034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7500894592116382014:2233], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 3 2025-05-05T09:41:55.651041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.651044Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:41:55.651051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 2025-05-05T09:41:55.651669Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-05-05T09:41:55.651687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-05-05T09:41:55.651689Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-05-05T09:41:55.651693Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-05-05T09:41:55.651697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-05T09:41:55.651715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-05-05T09:41:55.651747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-05-05T09:41:55.651809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-05-05T09:41:55.652332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-05-05T09:41:55.652337Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-05-05T09:41:55.652350Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-05-05T09:41:55.652353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-05T09:41:55.652356Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#281474976715678:0 progress is 1/1 2025-05-05T09:41:55.652357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-05T09:41:55.652359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-05-05T09:41:55.652368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500894592116382433:2359] message: TxId: 281474976715678 2025-05-05T09:41:55.652373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-05-05T09:41:55.652377Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715678:0 2025-05-05T09:41:55.652379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715678:0 2025-05-05T09:41:55.652428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T09:41:55.652507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.652520Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438115639 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-05-05T09:41:50.773730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894567964455661:2266];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.773854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmpMVrRM8/pdisk_1.dat 2025-05-05T09:41:50.839805Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32409 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:50.911425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.911456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.912095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.913459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:50.922691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.986209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:50.998866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.226231Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894571775263701:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:51.226287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmpCm5EHH/pdisk_1.dat 2025-05-05T09:41:51.243401Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15653 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.331217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.331350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.331643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.332324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:51.335641Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.342788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.359427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:51.371569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:51.704402Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894574816347187:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:51.704426Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmpjQ0XJB/pdisk_1.dat 2025-05-05T09:41:51.715249Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24708 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.808368Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.808405Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.808775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.809561Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:51.819346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.834438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:51.852184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.212872Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894579446507748:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.213077Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmpIh2tZ8/pdisk_1.dat 2025-05-05T09:41:52.227577Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25250 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:52.316948Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.316985Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.317430Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.319003Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:52.320733Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, ... rsion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:53.840091Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.840121Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.840484Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.841020Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:53.844108Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.848188Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.863265Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:53.876754Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.252959Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894584179777118:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.253255Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmpWZD94i/pdisk_1.dat 2025-05-05T09:41:54.273877Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10513 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.357251Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.357294Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.357733Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.360023Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.375642Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.383568Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.388263Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.403930Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.431821Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmprB7pX1/pdisk_1.dat 2025-05-05T09:41:54.766239Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:54.771189Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64667 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:54.852278Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.852310Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.852613Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.853435Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.855009Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:54.863409Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.883774Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.896140Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:55.275168Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894591670452949:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.275198Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af7/r3tmp/tmpLNGIol/pdisk_1.dat 2025-05-05T09:41:55.294742Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20876 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.381144Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.381186Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:55.381612Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.382233Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:55.383343Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.407537Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:55.408981Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.420688Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.432223Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::SetLockFail >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 |81.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[weak_field-weak_field_join--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-05-05T09:41:55.690113Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894589298433518:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.691004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a9d/r3tmp/tmpk9lLo1/pdisk_1.dat 2025-05-05T09:41:55.764765Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32164 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T09:41:55.791783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.791818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.793045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.802877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:55.811038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.874233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2025-05-05T09:41:55.883901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.899104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:55.935187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> test.py::test[blocks-date_less--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar--Results] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--Results] >> TFlatTest::CopyTableAndRead >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-05-05T09:41:55.460788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894591448192752:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.460942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aa1/r3tmp/tmpSp5SLO/pdisk_1.dat 2025-05-05T09:41:55.530428Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29995 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T09:41:55.567198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.567246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.571434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.575327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.580589Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:55.584931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.614351Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:55.615392Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:55.621815Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:55.622626Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-05-05T09:41:55.629934Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1746438115646 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-05-05T09:41:55.649003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T09:41:55.649080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-05-05T09:41:55.649162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:55.649176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T09:41:55.649179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T09:41:55.649230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T09:41:55.649294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-05-05T09:41:55.649307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T09:41:55.649564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-05-05T09:41:55.649584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-05-05T09:41:55.649619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.649631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 
2025-05-05T09:41:55.649719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:55.649740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:55.649750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:55.649879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T09:41:55.649902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-05T09:41:55.649913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-05-05T09:41:55.649923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2025-05-05T09:41:55.649927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T09:41:55.649929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-05-05T09:41:55.649931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 waiting... 2025-05-05T09:41:55.650625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-05-05T09:41:55.650642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T09:41:55.650664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281 ... 
Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894592748160262 RawX2: 4503608217307455 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:56.168479Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894592748160465 RawX2: 4503608217307483 } TabletId: 72075186224037894 State: 4 2025-05-05T09:41:56.168677Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894592748160465 RawX2: 4503608217307483 } TabletId: 72075186224037894 State: 4 2025-05-05T09:41:56.168704Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894592748159929 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-05T09:41:56.168718Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894592748160463 RawX2: 4503608217307482 } TabletId: 72075186224037893 State: 4 2025-05-05T09:41:56.168732Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894592748159924 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-05-05T09:41:56.168749Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, 
message: Source { RawX1: 7500894592748160263 RawX2: 4503608217307456 } TabletId: 72075186224037891 State: 4 2025-05-05T09:41:56.168775Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.168801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.168982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-05T09:41:56.169073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T09:41:56.169124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T09:41:56.169171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T09:41:56.169217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, 
message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:56.169266Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T09:41:56.169270Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-05T09:41:56.169443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T09:41:56.169447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T09:41:56.169453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T09:41:56.169457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:56.169459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:56.169462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:56.169468Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-05-05T09:41:56.169471Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-05T09:41:56.169473Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:41:56.169500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T09:41:56.169503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-05T09:41:56.169511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T09:41:56.169514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:56.169516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:41:56.169520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T09:41:56.169521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-05T09:41:56.169797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:56.169856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:56.169874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:56.169889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 
2025-05-05T09:41:56.169891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:56.169900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:56.170425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:41:56.170428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T09:41:56.170435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:56.170437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:41:56.170443Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:56.170480Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T09:41:56.170483Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found >> TFlatTest::ShardFreezeRejectBadProtobuf |81.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> TLocksTest::Range_IncorrectNullDot1 >> TFlatTest::MergeEmptyAndWrite [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> test.py::test[aggregate-compare_tuple--Results] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Results] >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> test.py::test[blocks-date_not_equals--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-05-05T09:41:55.556804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894589472416493:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.556826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a9f/r3tmp/tmpjEXMo1/pdisk_1.dat 2025-05-05T09:41:55.629587Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:55.663165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.663198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:20350 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T09:41:55.667363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.708340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:55.717596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.747351Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:55.749721Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:55.756007Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:55.757729Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438115779 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) Copy TableOld to Table 2025-05-05T09:41:55.778044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T09:41:55.778136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.778240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T09:41:55.778250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-05T09:41:55.778252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:55.778257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:55.778260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T09:41:55.778294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T09:41:55.778317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T09:41:55.778498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:55.778506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T09:41:55.778675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:55.778716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-05T09:41:55.778770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T09:41:55.778780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 
2025-05-05T09:41:55.778810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-05T09:41:55.778840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T09:41:55.778844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894589472416995:2238], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-05T09:41:55.778853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894589472416995:2238], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-05T09:41:55.778867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.778875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-05T09:41:55.779008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:55.779049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:55.779455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:55.779468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:55.779471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T09:41:55.779474Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-05T09:41:55.779478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T09:41:55.779537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:55.779544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 
Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:55.779545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T09:41:55.779547Z node 1 :FLAT_TX_SCHEMESHARD INFO ... 4046644480 2025-05-05T09:41:56.680994Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-05-05T09:41:56.681019Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7500894594377549684:2700], serverId# [2:7500894594377549686:3436], sessionId# [0:0:0] 2025-05-05T09:41:56.681021Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-05-05T09:41:56.681024Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-05-05T09:41:56.681037Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:56.681079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T09:41:56.681115Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-05-05T09:41:56.681117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-05T09:41:56.681119Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-05-05T09:41:56.681120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-05T09:41:56.681122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-05-05T09:41:56.681131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500894594377549659:2697] message: TxId: 281474976715784 2025-05-05T09:41:56.681134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-05-05T09:41:56.681137Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715784:0 2025-05-05T09:41:56.681139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715784:0 2025-05-05T09:41:56.681159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T09:41:56.681233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894594377547942 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-05-05T09:41:56.681246Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.681330Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T09:41:56.681339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037888 was deleted 2025-05-05T09:41:56.682022Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be 
activated: wait to activation from: 2025-05-05T09:41:56.682045Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:56.682323Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:56.682352Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-05-05T09:41:56.682362Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-05-05T09:41:56.682450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:56.682546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:56.682576Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:56.682597Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T09:41:56.682603Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7500894594377548067:2391], serverId# [2:7500894594377548068:2392], sessionId# [0:0:0] 2025-05-05T09:41:56.682616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:56.682619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:56.682634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:56.682798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:56.682800Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found Check that tablet 72075186224037889 was deleted 2025-05-05T09:41:56.682804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 Check that tablet 72075186224037890 was deleted 2025-05-05T09:41:56.682823Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:56.683078Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:56.683086Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T09:41:56.683126Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T09:41:56.683414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894594377548230 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-05-05T09:41:56.683425Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.683485Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-05-05T09:41:56.683539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894594377548229 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:56.683544Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:56.683582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.683618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:56.683680Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T09:41:56.683694Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:56.683720Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-05T09:41:56.684181Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) 2025-05-05T09:41:56.684422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:56.684499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T09:41:56.684537Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T09:41:56.684551Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7500894594377549538:3309], serverId# [2:7500894594377549539:3310], sessionId# [0:0:0] 2025-05-05T09:41:56.684556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:56.684561Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7500894594377548310:2547], serverId# [2:7500894594377548318:2555], sessionId# [0:0:0] 2025-05-05T09:41:56.684566Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T09:41:56.684578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:56.684598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:56.684601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T09:41:56.684611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:56.684663Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500894594377548418:2624], serverId# 
[2:7500894594377548419:2625], sessionId# [0:0:0] 2025-05-05T09:41:56.684667Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500894594377548309:2546], serverId# [2:7500894594377548319:2556], sessionId# [0:0:0] 2025-05-05T09:41:56.684740Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T09:41:56.684762Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T09:41:56.684973Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T09:41:56.684978Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:41:56.684988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:56.684992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:41:56.685002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:56.685005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:56.685012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:56.685149Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:56.685172Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 |81.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--ForceBlocks] >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> TFlatTest::CopyTableAndDropCopy [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q03-default.txt-Results] >> TLocksTest::GoodDupLock >> TLocksTest::SetEraseSet [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--ForceBlocks] >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> test.py::test[join-bush_dis_in_in--Results] [GOOD] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--ForceBlocks] [SKIPPED] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[join-full_equal_null-off-ForceBlocks] >> TLocksTest::BrokenSameShardLock [GOOD] >> TLocksTest::Range_Pinhole ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-05-05T09:41:56.536423Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894596213881805:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:56.536606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a91/r3tmp/tmpXNOKoK/pdisk_1.dat 2025-05-05T09:41:56.617589Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-05-05T09:41:56.637893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.637924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.639034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29671 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.657475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:56.661829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.014252Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894597758102483:2254];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.014293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a91/r3tmp/tmpUZC2Ch/pdisk_1.dat 2025-05-05T09:41:57.027222Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2020 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:57.118267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.118304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.118829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.119264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.121173Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.124965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.149877Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1297 647 2154)b }, ecr=1.000 2025-05-05T09:41:57.149966Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1171 521 2626)b }, ecr=1.000 2025-05-05T09:41:57.157446Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1653 647 6413)b }, ecr=1.000 2025-05-05T09:41:57.159308Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2406 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438117186 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-05-05T09:41:57.169465Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.169981Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.170033Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.170261Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.170485Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.170564Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:57.170711Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.170734Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.170789Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:57.170953Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.171136Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.171216Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:57.171367Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.171389Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.171444Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:57.171573Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.171737Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.171801Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:57.171934Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.171954Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.172002Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:57.172128Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.172285Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.172347Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:57.172484Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.172502Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.172554Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:57.172679Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.172847Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.172915Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:57.173078Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.173098Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.173150Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:57.173277Z node 2 
:TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-05-05T09:41:57.173443Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:41:57.173508Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-05-05T09:41:57.173692Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T09:41:57.173699Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-05-05T09:41:57.173729Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:41:57.173785Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-05-05T09:41:57.173887Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T09:41:57.173890Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T09:41:57.185900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: ... MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-05-05T09:41:57.272074Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T09:41:57.272111Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-05-05T09:41:57.272128Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T09:41:57.272135Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-05-05T09:41:57.272141Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746438117312:281474976715687] in PlanQueue unit at 72075186224037890 2025-05-05T09:41:57.272191Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1746438117312:281474976715687 keys extracted: 0 2025-05-05T09:41:57.272229Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:41:57.272252Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T09:41:57.272275Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037890 2025-05-05T09:41:57.272379Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T09:41:57.272672Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1746438117312} 2025-05-05T09:41:57.272691Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-05-05T09:41:57.272704Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-05-05T09:41:57.272718Z node 2 :TX_DATASHARD DEBUG: Complete [1746438117312 : 281474976715687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7500894597758102622:2149], exec latency: 0 ms, propose latency: 0 ms 2025-05-05T09:41:57.272727Z node 2 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715687 state PreOffline TxInFly 0 2025-05-05T09:41:57.272736Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T09:41:57.272849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 
Status: COMPLETE TxId: 281474976715687 Step: 1746438117312 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 233 } } 2025-05-05T09:41:57.272868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-05-05T09:41:57.272898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1746438117312 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 233 } } 2025-05-05T09:41:57.272920Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1746438117312 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 233 } } 2025-05-05T09:41:57.273085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894597758103188 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-05T09:41:57.273158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-05-05T09:41:57.273172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894597758103188 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-05T09:41:57.273176Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-05-05T09:41:57.273184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7500894597758103188 RawX2: 4503608217307444 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-05-05T09:41:57.273195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273198Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273207Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715687:0 129 -> 240 2025-05-05T09:41:57.273270Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273294Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-05-05T09:41:57.273297Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:41:57.273302Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-05-05T09:41:57.273392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:57.273422Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-05-05T09:41:57.273425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-05T09:41:57.273428Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-05-05T09:41:57.273430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-05T09:41:57.273433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-05-05T09:41:57.273444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500894597758103435:2393] message: TxId: 281474976715687 2025-05-05T09:41:57.273447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-05-05T09:41:57.273451Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-05-05T09:41:57.273453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715687:0 2025-05-05T09:41:57.273477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: 2025-05-05T09:41:57.274693Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-05-05T09:41:57.274729Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T09:41:57.275299Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:57.275538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894597758103188 RawX2: 4503608217307444 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:57.275563Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.275677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 
72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.275835Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:57.276252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:57.276335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:57.276392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:57.276399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:57.276412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:57.276538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:57.276549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:57.276560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:57.276804Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T09:41:57.276817Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500894597758103306:2577], serverId# [2:7500894597758103307:2578], sessionId# [0:0:0] 2025-05-05T09:41:57.276940Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:41:57.277034Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:57.277064Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 >> TCancelTx::CrossShardReadOnly >> TLocksTest::BrokenLockUpdate >> TFlatTest::CopyTableAndDropOriginal [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-05-05T09:41:55.857024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894590295076081:2137];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.857920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a9c/r3tmp/tmpXNfqfV/pdisk_1.dat 2025-05-05T09:41:55.933155Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12609 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.000302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.000330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.003402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:56.003901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.022622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T09:41:56.022696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.022732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-05-05T09:41:56.022749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-05-05T09:41:56.022767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T09:41:56.022834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-05T09:41:56.022850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:56.023786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:56.023819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-05-05T09:41:56.023885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046644480 2025-05-05T09:41:56.023892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-05T09:41:56.023947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T09:41:56.023973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T09:41:56.023977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894590295076658:2371], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-05-05T09:41:56.023980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894590295076658:2371], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-05-05T09:41:56.023988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.023994Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:41:56.024017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 2025-05-05T09:41:56.024612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 waiting... 
2025-05-05T09:41:56.025097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T09:41:56.025116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T09:41:56.025118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-05T09:41:56.025122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-05-05T09:41:56.025127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-05-05T09:41:56.025178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T09:41:56.025198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-05-05T09:41:56.025204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-05-05T09:41:56.025206Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-05-05T09:41:56.025208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:56.025215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-05-05T09:41:56.025900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:56.025922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-05-05T09:41:56.025926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:56.026072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-05-05T09:41:56.026141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-05-05T09:41:56.026171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-05T09:41:56.026183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-05-05T09:41:56.026783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1746438116073, transactions count in step: 1, at schemeshard: 
72057594046644480 2025-05-05T09:41:56.026817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746438116073 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-05-05T09:41:56.026826Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1746438116073, at schemeshard: 72057594046644480 2025-05-05T09:41:56.026857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-05-05T09:41:56.026894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-05-05T09:41:56.026906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:56.027336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T09:41:56.027346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-05-05T09:41:56.027390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T09:41:56.027411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T09:41:56.027415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894590295076658:2371], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-05-05T09:41:56.027418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894590295076658:2371], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-05-05T09:41:56.027424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.027428Z node 1 : ... 
y{4194304 dyn 0} 2025-05-05T09:41:56.067372Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:13} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{12, redo 124b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2025-05-05T09:41:56.067375Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:13} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T09:41:56.067576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T09:41:56.067584Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-05T09:41:56.067586Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-05T09:41:56.067592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T09:41:56.067595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-05-05T09:41:56.067598Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-05-05T09:41:56.067601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-05-05T09:41:56.067623Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-05T09:41:56.067626Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T09:41:56.067645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T09:41:56.067648Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-05-05T09:41:56.067651Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-05T09:41:56.067655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-05-05T09:41:56.067657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-05-05T09:41:56.067658Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-05-05T09:41:56.067659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:56.067665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2025-05-05T09:41:56.067668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7500894594590044316:2302] 2025-05-05T09:41:56.067673Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-05-05T09:41:56.067675Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T09:41:56.068541Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068546Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:13:1:24576:107:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068551Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068554Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:122:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068558Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068561Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068566Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068569Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-05-05T09:41:56.068587Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} commited cookie 1 for step 13 2025-05-05T09:41:56.068603Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-05-05T09:41:56.068624Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-05-05T09:41:56.068627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-05-05T09:41:56.068631Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-05-05T09:41:56.068633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-05-05T09:41:56.068731Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill 
[1:7500894594590044317:2302] 2025-05-05T09:41:56.068754Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7500894594590044317:2302] 2025-05-05T09:41:56.068777Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7500894594590044317:2302] 2025-05-05T09:41:56.072991Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-05-05T09:41:56.073010Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-05-05T09:41:56.073043Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-05-05T09:41:56.073049Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:14} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-05-05T09:41:56.073079Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7500894590295076631:2340] 2025-05-05T09:41:56.073082Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7500894590295076631:2340] 2025-05-05T09:41:56.073088Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7500894590295076620:2340] EventType# 269156352 2025-05-05T09:41:56.207539Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] send [1:7500894590295076522:2079] 2025-05-05T09:41:56.207554Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] push event to server [1:7500894590295076522:2079] 2025-05-05T09:41:56.207601Z node 1 :PIPE_SERVER DEBUG: [72057594037936129] HandleSend Sender# [1:7500894590295075983:2079] EventType# 272039936 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a9c/r3tmp/tmpWgYYmY/pdisk_1.dat 2025-05-05T09:41:56.367257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:56.369562Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4950 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:56.458066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.458095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.458400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.459111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:56.462854Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.465964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.520913Z node 2 :TX_PROXY ERROR: Actor# [2:7500894601159623193:2598] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000077 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-05-05T09:41:57.520946Z node 2 :TX_PROXY ERROR: Actor# [2:7500894601159623193:2598] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-05-05T09:41:56.113819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894594849917341:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:56.114185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a95/r3tmp/tmpOfoEFt/pdisk_1.dat 2025-05-05T09:41:56.186179Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10223 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.220920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:56.223868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.236632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.258788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.258819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.259978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:56.317818Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:56.318484Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:56.325415Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:56.326561Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-05-05T09:41:56.356782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T09:41:56.356855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.356957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T09:41:56.356967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-05T09:41:56.356970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:56.356974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:56.356977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T09:41:56.357010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T09:41:56.357028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046644480 2025-05-05T09:41:56.357195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:56.357202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T09:41:56.357364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:56.357392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-05-05T09:41:56.357446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T09:41:56.357456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T09:41:56.357483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-05T09:41:56.357510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T09:41:56.357520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894594849917848:2240], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-05-05T09:41:56.357529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894594849917848:2240], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 waiting... 
2025-05-05T09:41:56.357542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.357555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-05T09:41:56.357639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:56.357658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:56.358037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T09:41:56.358058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T09:41:56.358059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-05-05T09:41:56.358063Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-05T09:41:56.358067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T09:41:56.358119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T09:41:56.358131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-05-05T09:41:56.358132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-05-05T09:41:56.358135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-05-05T09:41:56.358136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-05-05T09:41:56.358143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-05-05T09:41:56.358177Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T09:41:56.358207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-05T09:41:56.358223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-05-05T09:41:56.358232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T09:41:56.358234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-05-05T09:41:56.358253Z no ... ted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-05-05T09:41:57.328232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-05-05T09:41:57.328234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T09:41:57.328236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-05T09:41:57.328238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T09:41:57.328248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.328256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.328260Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:57.328399Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-05-05T09:41:57.328409Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:7500894597467954527:2738], serverId# [3:7500894597467954529:2740], sessionId# [0:0:0] 2025-05-05T09:41:57.328413Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037893 reason = ReasonStop 2025-05-05T09:41:57.328416Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [3:7500894597467954528:2739], serverId# [3:7500894597467954530:2741], sessionId# [0:0:0] 2025-05-05T09:41:57.328421Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T09:41:57.328425Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T09:41:57.328578Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-05-05T09:41:57.328581Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-05T09:41:57.328672Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-05-05T09:41:57.328709Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-05-05T09:41:57.329121Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037893 2025-05-05T09:41:57.329141Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-05-05T09:41:57.329503Z node 3 
:TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037892 from 72075186224037890 is reset 2025-05-05T09:41:57.329512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:57.329515Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2025-05-05T09:41:57.329526Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T09:41:57.329532Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7500894597467954011:2383], serverId# [3:7500894597467954012:2384], sessionId# [0:0:0] 2025-05-05T09:41:57.329585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:57.329586Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T09:41:57.329596Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T09:41:57.329636Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:57.329655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:57.329679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894597467954202 RawX2: 4503612512274741 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:57.329684Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.329701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894597467954204 RawX2: 4503612512274742 } TabletId: 72075186224037891 State: 4 2025-05-05T09:41:57.329704Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.329724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:57.329726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:57.329726Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-05-05T09:41:57.329738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:57.329822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:57.329826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:41:57.329848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:41:57.329850Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 
tabletId 72075186224037888 2025-05-05T09:41:57.329863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.329873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.329878Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T09:41:57.329879Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:57.329883Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7500894597467954021:2390], serverId# [3:7500894597467954022:2391], sessionId# [0:0:0] 2025-05-05T09:41:57.329890Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:57.329893Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T09:41:57.329949Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T09:41:57.329958Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-05-05T09:41:57.330001Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-05-05T09:41:57.335181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:57.335279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T09:41:57.335339Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:57.335372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:57.335393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:57.335396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T09:41:57.335412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:57.335720Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:57.335729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:57.335748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:57.335753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:41:57.335765Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:57.337192Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 
2025-05-05T09:41:57.337221Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7500894597467954284:2563], serverId# [3:7500894597467954287:2566], sessionId# [0:0:0] 2025-05-05T09:41:57.337232Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T09:41:57.337241Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:7500894597467954276:2558], serverId# [3:7500894597467954277:2559], sessionId# [0:0:0] 2025-05-05T09:41:57.338959Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-05-05T09:41:57.338998Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-05-05T09:41:57.339203Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T09:41:57.339242Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T09:41:57.339760Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:57.339774Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-05-05T09:41:57.627161Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-05-05T09:41:57.628386Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-05-05T09:41:57.628928Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-05T09:41:57.629158Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-05T09:41:57.629322Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-05T09:41:57.629444Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] >> TLocksTest::GoodSameKeyLock >> TLocksTest::CK_Range_BrokenLockInf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-05-05T09:41:56.715164Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894594085549808:2071];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:56.715410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a8d/r3tmp/tmp1mvOdS/pdisk_1.dat 2025-05-05T09:41:56.773413Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21531 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.843485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.843520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.844445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.845668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:56.851236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.859314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:56.864308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.932157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.944315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:56.957969Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715662: Validate (783): Key validation status: 3 2025-05-05T09:41:56.958071Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550668:2491] txid# 281474976715662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T09:41:56.958083Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550668:2491] txid# 281474976715662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T09:41:56.958087Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550668:2491] txid# 281474976715662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T09:41:56.958628Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715663: Validate (783): Key validation status: 3 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-05-05T09:41:56.958656Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550690:2498] txid# 281474976715663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T09:41:56.958664Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550690:2498] txid# 281474976715663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T09:41:56.958666Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550690:2498] txid# 281474976715663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T09:41:56.959115Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715664: Validate (783): Key validation status: 3 2025-05-05T09:41:56.959152Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550697:2502] txid# 281474976715664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T09:41:56.959163Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550697:2502] txid# 281474976715664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T09:41:56.959166Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550697:2502] txid# 281474976715664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T09:41:56.959628Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715665: Validate (783): Key validation status: 3 2025-05-05T09:41:56.959655Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550703:2505] txid# 281474976715665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T09:41:56.959663Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550703:2505] txid# 281474976715665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T09:41:56.959665Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594085550703:2505] txid# 281474976715665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-05-05T09:41:57.172490Z node 2 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894597605502164:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.172760Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a8d/r3tmp/tmpMycf5n/pdisk_1.dat 2025-05-05T09:41:57.191341Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2514 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.277014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.277050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.277354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.278724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.280061Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.294375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.307985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.322079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.652723Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894600474296143:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.652744Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a8d/r3tmp/tmpu213ec/pdisk_1.dat 2025-05-05T09:41:57.667986Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25063 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.757035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.757437Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.757467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.758386Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.762342Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.771533Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:57.776260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.806406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.819500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-05-05T09:41:53.299240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894581208632952:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.299272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000add/r3tmp/tmp3DdnW9/pdisk_1.dat 2025-05-05T09:41:53.365261Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21877 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:53.399589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.399634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.400679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:53.428480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.434966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.447608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:53.448820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438113546 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1746438113567 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-05-05T09:41:53.526544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1746438113630 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1746438113644 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-05-05T09:41:53.599295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1746438113658 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1746438113707 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-05-05T09:41:53.667487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715665 CreateStep: 1746438113721 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715666 CreateStep: 1746438113735 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: fal ... 46644480 2025-05-05T09:41:57.949433Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2025-05-05T09:41:57.949496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.949526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.949549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.949558Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-05-05T09:41:57.949566Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-05-05T09:41:57.949568Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-05-05T09:41:57.949573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-05-05T09:41:57.949576Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-05-05T09:41:57.949578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-05-05T09:41:57.949578Z node 2 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-05-05T09:41:57.949580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-05-05T09:41:57.949590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500894601269034125:2406] message: TxId: 281474976715686 2025-05-05T09:41:57.949594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-05-05T09:41:57.949598Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715686:0 2025-05-05T09:41:57.949600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-05-05T09:41:57.949613Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-05-05T09:41:57.949627Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-05-05T09:41:57.949632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-05-05T09:41:57.955587Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:7500894601269034164:3002], serverId# [2:7500894601269034165:3003], sessionId# [0:0:0] 2025-05-05T09:41:57.955651Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.956218Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.956244Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.957371Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [2:7500894601269034174:3009], serverId# [2:7500894601269034175:3010], sessionId# [0:0:0] 2025-05-05T09:41:57.957413Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.957779Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.957814Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.958637Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.959003Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.959033Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.959791Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.961012Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.961039Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.961766Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.962110Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.962123Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.962712Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:57.962867Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T09:41:57.962878Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-05-05T09:41:57.963218Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.963576Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.963590Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.963864Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:57.963952Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T09:41:57.963966Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-05-05T09:41:57.964498Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.964793Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.964811Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.965511Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.965782Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.965798Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.966414Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.966668Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.966685Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.967571Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.967831Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.967851Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.968585Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.968909Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.968920Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.969637Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:57.969966Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T09:41:57.969976Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-05-05T09:41:57.970027Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.970536Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.970556Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.971312Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-05-05T09:41:57.971618Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 
72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-05-05T09:41:57.971627Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-05-05T09:41:57.971669Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.972711Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.972731Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.973575Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.976300Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.976334Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-05-05T09:41:57.977441Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-05-05T09:41:57.978383Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-05-05T09:41:57.978409Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-05-05T09:41:57.979411Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-05-05T09:41:57.980450Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-05-05T09:41:57.980487Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-05-05T09:41:57.980789Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-05-05T09:41:57.980919Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-05-05T09:41:57.981027Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-05-05T09:41:57.981139Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-05-05T09:41:57.981264Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-05-05T09:41:57.981328Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TLocksFatTest::PointSetBreak >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-05-05T09:41:57.110525Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894600403345439:2069];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.110751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a8a/r3tmp/tmp8Uc7fT/pdisk_1.dat 
2025-05-05T09:41:57.186169Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19257 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-05T09:41:57.213167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.213195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.214277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.255319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.263312Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.287235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:57.292837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.345322Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:57.347974Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:57.354173Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:57.356006Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-05-05T09:41:57.387544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-05-05T09:41:57.387618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.387740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T09:41:57.387752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-05-05T09:41:57.387754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:41:57.387760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:57.387763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T09:41:57.387792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-05-05T09:41:57.387811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-05-05T09:41:57.388034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:57.388041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-05-05T09:41:57.388231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:57.388263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 
2025-05-05T09:41:57.388330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-05-05T09:41:57.388339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-05-05T09:41:57.388383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-05-05T09:41:57.388399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-05-05T09:41:57.388403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894600403345927:2235], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-05-05T09:41:57.388407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7500894600403345927:2235], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-05-05T09:41:57.388415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.388423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-05-05T09:41:57.388523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-05-05T09:41:57.388569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
2025-05-05T09:41:57.388916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:57.388939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:57.388941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T09:41:57.388945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-05-05T09:41:57.388949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-05-05T09:41:57.388999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:57.389009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2025-05-05T09:41:57.389010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2025-05-05T09:41:57.389012Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-05-05T09:41:57.389031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-05-05T09:41:57.389039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715676, ready parts: 0/1, is published: true 2025-05-05T09:41:57.389059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-05-05T09:41:57.389083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-05-05T09:41:57.389107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715676, partId: 0, tablet: 72057594037968897 2025-05-05T09:41:57.389115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:3, partId: 0 2025-05-05T09:41:57.389118Z node 1 : ... 
8337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894600667732661 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-05T09:41:57.778345Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7500894600667733198:2376], serverId# [2:7500894600667733202:2686], sessionId# [0:0:0] 2025-05-05T09:41:57.778350Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.778381Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:57.778384Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-05-05T09:41:57.778387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894600667732659 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-05-05T09:41:57.778389Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.778399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894600667732971 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-05T09:41:57.778401Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.778411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894600667732971 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-05T09:41:57.778413Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.778428Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-05-05T09:41:57.778436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.778462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.778462Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-05-05T09:41:57.778468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894600667732964 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:57.778469Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.778479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894600667732964 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T09:41:57.778481Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard 
informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:41:57.778497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.778502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.778532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.778536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:41:57.778666Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T09:41:57.778676Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-05-05T09:41:57.778679Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:57.778681Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-05-05T09:41:57.779261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779293Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-05-05T09:41:57.779299Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-05-05T09:41:57.779302Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-05-05T09:41:57.779314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:41:57.779349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:41:57.779379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-05-05T09:41:57.779401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779402Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-05-05T09:41:57.779413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-05-05T09:41:57.779436Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 
2025-05-05T09:41:57.779440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-05-05T09:41:57.779464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-05-05T09:41:57.779467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:41:57.779470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:41:57.779539Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T09:41:57.779648Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T09:41:57.779657Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T09:41:57.779670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:41:57.779678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:41:57.779684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:41:57.779685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T09:41:57.779687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:57.779688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:41:57.779690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-05-05T09:41:57.779692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:57.779693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:41:57.779696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:41:57.779699Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-05-05T09:41:57.779813Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-05-05T09:41:57.779830Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-05-05T09:41:57.780092Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-05-05T09:41:57.780106Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-05-05T09:41:57.780342Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-05-05T09:41:57.780363Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# 
[2:7500894600667733149:2640], serverId# [2:7500894600667733151:2642], sessionId# [0:0:0] 2025-05-05T09:41:57.780372Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7500894600667733050:2566], serverId# [2:7500894600667733052:2568], sessionId# [0:0:0] 2025-05-05T09:41:57.780401Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:41:57.780408Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-05-05T09:41:57.780418Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2025-05-05T09:41:58.079167Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-05-05T09:41:58.079284Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-05-05T09:41:58.079368Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-05-05T09:41:58.079446Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-05-05T09:41:53.178300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894584079846059:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.178350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpfcn1BU/pdisk_1.dat 2025-05-05T09:41:53.237845Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18888 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:53.281362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.281392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.283199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:53.312519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.316639Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.323258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.389135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:53.402704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.659516Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894582232138112:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.659592Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpsgEIUP/pdisk_1.dat 2025-05-05T09:41:53.679430Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25778 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:53.767178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.767223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.767558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.768718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:53.779280Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.799901Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.803967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.817866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.828330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpbpp8wO/pdisk_1.dat 2025-05-05T09:41:54.168678Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:54.182751Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27355 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:54.267292Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.267332Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.267668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.269277Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.277934Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.305566Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.309128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.330222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.342236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.677028Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894585577695214:2206];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpq84Ejx/pdisk_1.dat 2025-05-05T09:41:54.699004Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:54.702143Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28936 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.783226Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.783257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09: ... 
ecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.338497Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.338534Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.338973Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.342992Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:56.348222Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.354749Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.376696Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.391848Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.742148Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894595630861868:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:56.742172Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpZbDnSy/pdisk_1.dat 2025-05-05T09:41:56.761930Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20183 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-05-05T09:41:56.847358Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.847401Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.847722Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.848840Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.855187Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:56.856901Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.869737Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.881541Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.240524Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894601296517547:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.240571Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpCKDLv9/pdisk_1.dat 2025-05-05T09:41:57.253814Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26535 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:57.344972Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.345000Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.345431Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.346407Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.348953Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.363591Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:57.368413Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.383982Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.399920Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.761920Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894601258072022:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ade/r3tmp/tmpwx7BgZ/pdisk_1.dat 2025-05-05T09:41:57.767418Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:57.783044Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4416 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:57.867281Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.867322Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.867628Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.868656Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.875294Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.880451Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.896279Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.910561Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-05-05T09:41:57.507122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894598748427259:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.507887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a87/r3tmp/tmp0nrEyw/pdisk_1.dat 2025-05-05T09:41:57.572284Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61040 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:57.643642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.643679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.644459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.644847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.651184Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:57.652608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.713273Z node 1 :TX_PROXY ERROR: Actor# [1:7500894598748427798:2358] txid# 281474976715659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-05T09:41:57.714067Z node 1 :TX_PROXY ERROR: Actor# [1:7500894598748427813:2366] txid# 281474976715660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-05-05T09:41:57.714765Z node 1 :TX_PROXY ERROR: Actor# [1:7500894598748427819:2371] txid# 281474976715661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-05T09:41:57.715431Z node 1 :TX_PROXY ERROR: Actor# [1:7500894598748427825:2376] txid# 281474976715662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-05-05T09:41:57.993327Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894597242984364:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.993418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a87/r3tmp/tmpVofRLz/pdisk_1.dat 2025-05-05T09:41:58.007255Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19013 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.099297Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.099328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.099746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.101965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:58.103843Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.112809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Results] >> TLocksFatTest::RangeSetBreak >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Results] [SKIPPED] >> test.py::test[binding-named_node_corr_names-default.txt-Results] >> TFlatTest::ShardUnfreezeNonFrozen >> TLocksTest::Range_GoodLock1 [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-05-05T09:41:57.692851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894599747349065:2267];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.692898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a7b/r3tmp/tmpSpxuIO/pdisk_1.dat 2025-05-05T09:41:57.757916Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21148 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:57.789668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.793144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.831942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.831966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.833088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:57.857417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.862484Z node 1 :TX_PROXY ERROR: Actor# [1:7500894599747349564:2387] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-05T09:41:57.863067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.868039Z node 1 :TX_PROXY ERROR: Actor# [1:7500894599747349604:2421] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-05T09:41:58.159430Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894605330280930:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.159469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a7b/r3tmp/tmpEkTp3x/pdisk_1.dat 2025-05-05T09:41:58.171985Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26451 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:58.263062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.263091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.263429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.264883Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.272875Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.274456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.338120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.357363Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715660: 2025-05-05T09:41:58.357552Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605330281660:2390] txid# 281474976715660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T09:41:58.357579Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605330281660:2390] txid# 281474976715660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T09:41:58.357589Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605330281660:2390] txid# 281474976715660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T09:41:58.359843Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715661: 2025-05-05T09:41:58.359930Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605330281668:2395] txid# 281474976715661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-05-05T09:41:58.359968Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605330281668:2395] txid# 281474976715661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-05-05T09:41:58.359978Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605330281668:2395] txid# 281474976715661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-05-05T09:41:58.365278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... 
>> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-05-05T09:41:53.459838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894583817122450:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.459857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmpnKwgON/pdisk_1.dat 2025-05-05T09:41:53.523087Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1500 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T09:41:53.561938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.561968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:53.563386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.563624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:53.577922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.606419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.615215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:53.945037Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894580378123927:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.945405Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmpKQ4PPt/pdisk_1.dat 2025-05-05T09:41:53.963083Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22382 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.054897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.054968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.055194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.055551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.062496Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.066038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.082211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.094595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:54.475089Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894587280057778:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmpLDNr5U/pdisk_1.dat 2025-05-05T09:41:54.512450Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:54.523067Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26342 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.589436Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.589465Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.589958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.590354Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.599105Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.605575Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.606815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.621904Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.633377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:54.987029Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894588192300523:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.987061Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmpSgCbEV/pdisk_1.dat 2025-05-05T09:41:55.000319Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7321 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.095124Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.095171Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.095610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboper ... rsion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.645218Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.645258Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.645639Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:56.646239Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:56.651278Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.655117Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:56.671095Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.686769Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.068067Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894600940637730:2070];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmpnBlUxy/pdisk_1.dat 2025-05-05T09:41:57.072573Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:57.082570Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16879 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.172325Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.172359Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.172739Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.174423Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.180609Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.187603Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:57.188959Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.212868Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.224181Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmp5hHFAW/pdisk_1.dat 2025-05-05T09:41:57.583213Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:57.585577Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26424 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.675806Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.675843Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.676210Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.676648Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.677972Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.684612Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:57.700181Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.713904Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.088457Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894605231606575:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.089271Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ac0/r3tmp/tmpmiXqgk/pdisk_1.dat 2025-05-05T09:41:58.108013Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16997 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.188410Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.188455Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.188804Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.189428Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.203939Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:58.219347Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:58.220710Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.232323Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:58.246668Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-05-05T09:41:53.759077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894581611121654:2070];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.759132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpdsGQhH/pdisk_1.dat 2025-05-05T09:41:53.824000Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15125 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:53.859233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:53.869271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:53.897763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.897793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.898861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:53.939930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.951517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:54.227900Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894584137299385:2078];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.228196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpczU60p/pdisk_1.dat 2025-05-05T09:41:54.251148Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9840 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.331042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.331075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.331854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.332652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.335488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.353860Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.355214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.376942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.399790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:54.718149Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894586102019957:2137];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.719306Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpuZhctb/pdisk_1.dat 2025-05-05T09:41:54.730378Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17290 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.821960Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.821993Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.822298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.823629Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:54.823916Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.828860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:54.851763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.865880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:55.244549Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894592248843167:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.244659Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpymky3L/pdisk_1.dat 2025-05-05T09:41:55.259111Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27746 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:55.349451Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.349482Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.349894Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:56.913984Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:56.914036Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:56.914259Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.915176Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:56.922005Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:56.935370Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:56.951131Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.309648Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894598625694226:2205];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.309777Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpn37TXc/pdisk_1.dat 2025-05-05T09:41:57.326240Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24283 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.413220Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.413256Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.413735Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.415413Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.419727Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.425086Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.447961Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.458261Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.785786Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894599390362540:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.785808Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpUoHWd3/pdisk_1.dat 2025-05-05T09:41:57.801115Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64642 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.889924Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.889956Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.890237Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.890991Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:57.891417Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.903206Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:57.919649Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.931297Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.316890Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894604938315092:2206];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.317793Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000abd/r3tmp/tmpbZ3g3I/pdisk_1.dat 2025-05-05T09:41:58.334667Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9653 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.425697Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.425729Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.426180Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.426767Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.428366Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.436814Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.459143Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.471118Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> test.py::test[select-select_all_from_concat_anon-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] >> test.py::test[join-premap_map_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TLocksTest::Range_CorrectNullDot >> test.py::test[blocks-interval_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-mod_uint64--Results] >> TLocksTest::Range_BrokenLock0 |81.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> TCancelTx::ImmediateReadOnly [GOOD] >> test.py::test[window-row_number_to_map-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_stat--Results] >> test.py::test[aggregate-ensure_count-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TObjectStorageListingTest::CornerCases >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2025-05-05T09:41:54.239136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894584274328468:2265];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.239189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmpCTBYQE/pdisk_1.dat 2025-05-05T09:41:54.319995Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18849 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:54.389759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.389783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.390351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.391243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.394984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.402624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.404035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.429388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.441838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmp92wP3z/pdisk_1.dat 2025-05-05T09:41:54.743069Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:54.743299Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65285 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:54.820703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.820742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.821073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.822777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.826435Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.837848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.851355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.869262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.218184Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894588429173745:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmp9QWbJK/pdisk_1.dat 2025-05-05T09:41:55.221220Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:55.232210Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7461 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:55.322572Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.322621Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.323083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.324660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:55.327240Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.332457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:55.347741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:55.363214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmpEpi5c8/pdisk_1.dat 2025-05-05T09:41:55.743047Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:55.743469Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28950 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.824454Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.824488Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.824886Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:55.825356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T ... 
urityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.370472Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.370510Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.370832Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.371693Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.373578Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.385523Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.404710Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.420875Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.758092Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894600047304792:2139];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.759030Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmpzrqTuE/pdisk_1.dat 2025-05-05T09:41:57.773325Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29863 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:57.862019Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.862049Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.862395Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.863352Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:57.864694Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.873245Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.888790Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.904081Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.273369Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894601515596417:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.279085Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmpfES5VY/pdisk_1.dat 2025-05-05T09:41:58.298517Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18748 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:58.384263Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.384303Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.384636Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:58.386684Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:58.386966Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:58.391024Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.449287Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.470086Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.802402Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894602738294566:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.802652Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aab/r3tmp/tmpYhR07k/pdisk_1.dat 2025-05-05T09:41:58.821106Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14858 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:58.909159Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.909199Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.909561Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.910422Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.919093Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.922876Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:58.928483Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.952776Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.964345Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TFlatTest::AutoMergeBySize >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TLocksTest::UpdateLockedKey >> TFlatTest::SelectRangeForbidNullArgs2 >> test.py::test[key_filter-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> TLocksTest::GoodLock >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> TLocksTest::Range_Pinhole [GOOD] >> TObjectStorageListingTest::TestFilter >> TLocksTest::SetBreakSetEraseBreak >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> TObjectStorageListingTest::Split >> test.py::test[select-select_all_from_concat_anon-default.txt-Results] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-05-05T09:41:58.249978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894602695732558:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.250100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a77/r3tmp/tmpGePEgw/pdisk_1.dat 2025-05-05T09:41:58.319266Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20042 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-05T09:41:58.351245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.351277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.351944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.388553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.400043Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:58.407849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:20042 2025-05-05T09:41:58.474740Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733134:2377] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T09:41:58.474771Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733134:2377] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:58.485835Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733147:2387] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T09:41:58.485879Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733147:2387] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:58.496149Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733160:2397] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T09:41:58.496174Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733160:2397] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:58.510359Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733186:2417] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T09:41:58.510383Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733186:2417] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:58.517149Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733199:2427] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T09:41:58.517178Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733199:2427] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:58.523777Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733212:2437] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T09:41:58.523806Z node 1 :TX_PROXY ERROR: Actor# [1:7500894602695733212:2437] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:58.723576Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894601298117695:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.729021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a77/r3tmp/tmpEuie8S/pdisk_1.dat 2025-05-05T09:41:58.745952Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17397 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.833643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.833681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.834006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.834624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:58.836302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:17397 2025-05-05T09:41:59.188544Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894608527465512:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.188564Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a77/r3tmp/tmpkd1vPM/pdisk_1.dat 2025-05-05T09:41:59.208716Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63615 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:59.292948Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.292974Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.293292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.294796Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.300471Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:59.301598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:63615 2025-05-05T09:41:59.362812Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466232:2379] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T09:41:59.362846Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466232:2379] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.370856Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466247:2391] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T09:41:59.370894Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466247:2391] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.377623Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466259:2400] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-05-05T09:41:59.377653Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466259:2400] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.403262Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466288:2423] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T09:41:59.403295Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466288:2423] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.418133Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466303:2435] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T09:41:59.418162Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466303:2435] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.427106Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466317:2446] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-05-05T09:41:59.427136Z node 3 :TX_PROXY ERROR: Actor# [3:7500894608527466317:2446] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.670437Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894607856173511:2060];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.670819Z node 4 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a77/r3tmp/tmp7wCtEl/pdisk_1.dat 2025-05-05T09:41:59.693613Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21758 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.779793Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.779835Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.780202Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.780996Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.783686Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:59.785287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21758 2025-05-05T09:41:59.842689Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-05-05T09:41:59.843017Z node 4 :TX_PROXY ERROR: Actor# [4:7500894607856174231:2378] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-05-05T09:41:59.845317Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-05-05T09:41:59.845427Z node 4 :TX_PROXY ERROR: Actor# [4:7500894607856174245:2386] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> TLocksFatTest::PointSetNotBreak >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-05-05T09:41:59.402011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894606675534293:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.402040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a56/r3tmp/tmprhAz9Z/pdisk_1.dat 2025-05-05T09:41:59.470255Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15284 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:59.542707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.542743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.543715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:59.544497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:59.552664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.615803Z node 1 :TX_PROXY ERROR: Actor# [1:7500894606675534972:2357] txid# 281474976715659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-05-05T09:41:59.894590Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894606325696861:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a56/r3tmp/tmpMS2dz0/pdisk_1.dat 2025-05-05T09:41:59.907091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:59.911283Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14022 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.939448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.942440Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:59.945954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.000232Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.000268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.001626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.112883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.125024Z node 2 :TX_PROXY ERROR: Actor# [2:7500894610620664732:2388] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2025-05-05T09:42:00.125657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.140711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> test.py::test[blocks-decimal_multiplicative_ops--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] >> TObjectStorageListingTest::Decimal [GOOD] >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> test.py::test[tpch-q11-default.txt-ForceBlocks] [GOOD] >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TFlatTest::RejectByPerRequestSize [GOOD] >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> TObjectStorageListingTest::TestSkipShards [FAIL] >> test.py::test[join-full_equal_null-off-ForceBlocks] [GOOD] >> test.py::test[join-full_equal_null-off-Results] [SKIPPED] >> test.py::test[join-join_no_correlation_in_order_by--ForceBlocks] >> TObjectStorageListingTest::SuffixColumns [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2025-05-05T09:42:00.178906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894613806972837:2131];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.180207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a40/r3tmp/tmpNEq01m/pdisk_1.dat 2025-05-05T09:42:00.234864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25726, node 1 
2025-05-05T09:42:00.249370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:42:00.249388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:42:00.249390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:42:00.249444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17704 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-05T09:42:00.283120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.283150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.284142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.315847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.323484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.331572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.343643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:00.845762Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894613886782020:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.845790Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a40/r3tmp/tmpKsChAI/pdisk_1.dat 2025-05-05T09:42:00.875385Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26521, node 2 2025-05-05T09:42:00.892290Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:42:00.892303Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:42:00.892306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:42:00.892361Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17365 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.953998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.954019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.954322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.955285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.955341Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.963157Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:00.976338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> test.py::test[binding-named_node_corr_names-default.txt-Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-05-05T09:42:00.635277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894613872617759:2136];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.636160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a15/r3tmp/tmpIenkw5/pdisk_1.dat 2025-05-05T09:42:00.699225Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8531 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.771106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.771140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.772245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.772828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:00.784691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.785870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.097498Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894618199652840:2198];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.097641Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a15/r3tmp/tmp7w5hiO/pdisk_1.dat 2025-05-05T09:42:01.114266Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5014 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.202698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.202729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.203047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.203773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.204966Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.223739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ab7/r3tmp/tmpyJbwT3/pdisk_1.dat 2025-05-05T09:41:54.226012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:54.242695Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4820 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.313713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.313744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.314550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.318663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.323214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.339135Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:54.347873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:56.970394Z node 1 :TX_DATASHARD ERROR: Transaction read size 51002133 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-05-05T09:41:56.970468Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002133 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-05-05T09:41:56.970534Z node 1 :TX_PROXY ERROR: Actor# [1:7500894594361655249:2909] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-05-05T09:41:57.347193Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894597193872216:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.347313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ab7/r3tmp/tmp3zirO3/pdisk_1.dat 2025-05-05T09:41:57.364608Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18191 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:57.452271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.452303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.452725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:57.454480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:57.454898Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.467695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.993998Z node 2 :TX_DATASHARD ERROR: Transaction read size 51002181 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-05-05T09:41:59.994079Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002181 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-05-05T09:41:59.994511Z node 2 :TX_PROXY ERROR: Actor# [2:7500894605783808264:2905] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ab7/r3tmp/tmpIqrhxR/pdisk_1.dat 2025-05-05T09:42:00.379071Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29664 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.462965Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.462999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.463400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.464625Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:00.474432Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.476691Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.487176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.374256Z node 3 :TX_PROXY DEBUG: actor# [3:7500894612103114469:2086] Handle TEvProposeTransaction 2025-05-05T09:42:01.374272Z node 3 :TX_PROXY DEBUG: actor# [3:7500894612103114469:2086] TxId# 281474976715700 ProcessProposeTransaction 2025-05-05T09:42:01.374282Z node 3 :TX_PROXY DEBUG: actor# [3:7500894612103114469:2086] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7500894616398082868:2598] DataReq marker# P0 2025-05-05T09:42:01.374300Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-05-05T09:42:01.374405Z node 3 :TX_PROXY DEBUG: Actor [3:7500894616398082868:2598] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-05-05T09:42:01.374407Z node 3 :TX_PROXY DEBUG: Actor [3:7500894616398082868:2598] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-05-05T09:42:01.374411Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] txid# 281474976715700 SEND to# [3:7500894612103114501:2101] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2025-05-05T09:42:01.374456Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-05-05T09:42:01.374695Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-05-05T09:42:01.374758Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-05-05T09:42:01.374840Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:42:01.374856Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-05-05T09:42:01.375137Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-05-05T09:42:01.375181Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-05-05T09:42:01.376535Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T09:42:01.376548Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-05-05T09:42:01.376632Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000431 out readset size 0 marker# P6 2025-05-05T09:42:01.376642Z node 3 :TX_PROXY DEBUG: Actor# [3:7500894616398082868:2598] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000775 out readset size 0 marker# P6 2025-05-05T09:42:01.376651Z node 3 :TX_PROXY ERROR: Actor# [3:7500894616398082868:2598] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001206 exceeded limit 10000 Status# ExecError 2025-05-05T09:42:01.376668Z node 3 :TX_PROXY ERROR: Actor# [3:7500894616398082868:2598] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-05-05T09:42:01.376945Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-05-05T09:42:01.376963Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-05-05T09:42:01.377065Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-05-05T09:42:01.377074Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TLocksFatTest::LocksLimit [GOOD] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |81.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[tpch-q11-default.txt-ForceBlocks] [GOOD] >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> 
test.py::test[blocks-mod_uint64--Results] [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-05-05T09:41:58.280573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894605365566424:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.280874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmpN3JbQw/pdisk_1.dat 2025-05-05T09:41:58.364759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:58.380826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.380871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.381950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21370 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.422577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.431116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.439294Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:58.446499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.479717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.491665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:58.763150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmpfzJxx5/pdisk_1.dat 2025-05-05T09:41:58.764530Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25577 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.856981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.857016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.857561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.858061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.859386Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.874136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.891841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:58.903143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmpwYblV1/pdisk_1.dat 2025-05-05T09:41:59.274315Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:59.278545Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32685 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.357727Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.357764Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.358191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.358801Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.360054Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.363937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:59.379697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.400007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.742823Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894608992589590:2237];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.742861Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmpGiTDjM/pdisk_1.dat 2025-05-05T09:41:59.767960Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17245 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.848356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.848390Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.848791Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.849355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:59.850165Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:59.860259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.870799Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.890729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmp9EziTX/pdisk_1.dat 2025-05-05T09:42:00.259133Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:42:00.262789Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:5199 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.344351Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.344390Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.344697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.345496Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.347067Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.356148Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.381546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.401286Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.784750Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7500894612847887436:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmpMUY6O4/pdisk_1.dat 2025-05-05T09:42:00.800649Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:00.804392Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25629 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:00.889220Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.889259Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.889684Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.890891Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.891141Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.907553Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.911987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.923407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.935112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.257903Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500894617382216252:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a72/r3tmp/tmpuTMWCP/pdisk_1.dat 2025-05-05T09:42:01.258403Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.270464Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17765 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.361401Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.361440Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.361867Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.362815Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.375271Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.386146Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.417130Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.427982Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
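The TFlatTest::RejectByPerRequestSize runs earlier in this output show the datashard and proxy rejecting oversized proposals once the deliberately lowered read-size budget of 10000 bytes is exceeded: a single-shard BAD_REQUEST with READ_SIZE_EXECEEDED, and a proxy-level ExecError when the combined read size across two shards (9000431 + 17000775 = 26001206 bytes) crosses the same limit. The client-side counterpart is to keep each request under whatever budget is configured, for example by chunking work. The sketch below is a minimal, hypothetical illustration in plain Python (not the YDB SDK); `batch_by_size` and the 10000-byte default are assumptions made for the example, the latter only mirroring the test's artificially low setting.

```python
# Minimal sketch, not YDB SDK code: keep each submitted batch under a byte
# budget so no single request exceeds a per-request read/size limit.
# The 10_000-byte default mirrors the artificially low limit used by the test;
# production limits are far larger.
import json
from typing import Dict, Iterable, Iterator, List


def batch_by_size(rows: Iterable[Dict], max_bytes: int = 10_000) -> Iterator[List[Dict]]:
    """Yield lists of rows whose approximate serialized size stays under max_bytes."""
    batch: List[Dict] = []
    batch_bytes = 0
    for row in rows:
        row_bytes = len(json.dumps(row).encode("utf-8"))  # rough size estimate
        if batch and batch_bytes + row_bytes > max_bytes:
            yield batch
            batch, batch_bytes = [], 0
        batch.append(row)
        batch_bytes += row_bytes
    if batch:
        yield batch


if __name__ == "__main__":
    rows = [{"key": i, "value": "x" * 512} for i in range(100)]
    for i, chunk in enumerate(batch_by_size(rows)):
        print(f"batch {i}: {len(chunk)} rows")  # each chunk -> its own request
```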
>> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] [GOOD] >> test.py::test[blocks-interval_div_scalar--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-05-05T09:42:00.837498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894611996383038:2200];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.838507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009e9/r3tmp/tmpHI5Q1x/pdisk_1.dat 2025-05-05T09:42:00.919578Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5633, node 1 2025-05-05T09:42:00.936831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:42:00.936841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:42:00.936843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:42:00.936881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20065 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.998579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.998617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.999433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.001058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.002442Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.022080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438121085 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438121085 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) 2025-05-05T09:42:01.510415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894618381562875:2072];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.510630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009e9/r3tmp/tmpqfpY84/pdisk_1.dat 2025-05-05T09:42:01.528053Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4334, node 2 2025-05-05T09:42:01.543202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:42:01.543222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:42:01.543224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:42:01.543275Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16848 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.614307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.614352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.615150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.615500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.618052Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.627131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.756435Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7500894618381564166:2483], Recipient [2:7500894618381563511:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-05-05T09:42:01.756452Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-05-05T09:42:01.756487Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:01.756542Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-05-05T09:42:01.756551Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-05-05T09:42:01.756556Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-05-05T09:42:01.756561Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-05-05T09:42:01.756566Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-05-05T09:42:01.756585Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-05-05T09:42:01.779675Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7500894618381564170:2484], Recipient [2:7500894618381563511:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-05-05T09:42:01.779690Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-05-05T09:42:01.779729Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:01.779783Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-05-05T09:42:01.779792Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-05-05T09:42:01.779809Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> TLocksTest::GoodLock [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> TLocksTest::GoodNullLock >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-05-05T09:41:58.937034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894603416046382:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.937056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a60/r3tmp/tmpP9JXEd/pdisk_1.dat 2025-05-05T09:41:58.998770Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21638 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.072921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.072947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.073607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.075027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.082011Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 
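The TObjectStorageListingTest::SuffixColumns trace above is easier to follow with the listing contract in mind: the datashard walks keys under PathColumnPrefix, reports anything that still contains PathColumnDelimiter past the prefix as a common prefix, returns the rest as contents, resumes after SerializedStartAfterKeySuffix, and stops at MaxKeys. Below is a toy model of those semantics over an in-memory key list; it is an assumption-level illustration in plain Python (function name and signature invented here), and it deliberately ignores the suffix columns that make the real listing return several rows per path.

```python
# Illustrative model of S3-style listing semantics (prefix + delimiter +
# start-after + max-keys), as exercised by the trace above. Not datashard code.
from bisect import bisect_right
from typing import List, Tuple


def list_objects(keys: List[str], prefix: str, delimiter: str,
                 start_after: str = "", max_keys: int = 10) -> Tuple[List[str], List[str]]:
    contents: List[str] = []
    common_prefixes: List[str] = []
    keys = sorted(keys)
    # Resume strictly after the start-after key, if one was given.
    idx = bisect_right(keys, start_after) if start_after else 0
    for key in keys[idx:]:
        if not key.startswith(prefix):
            if key > prefix:
                break  # sorted order: nothing under this prefix remains
            continue
        rest = key[len(prefix):]
        cut = rest.find(delimiter)
        if cut >= 0:
            # Everything up to and including the delimiter is one "directory".
            cp = prefix + rest[: cut + 1]
            if not common_prefixes or common_prefixes[-1] != cp:
                common_prefixes.append(cp)
        else:
            contents.append(key)
        if len(contents) + len(common_prefixes) >= max_keys:
            break
    return contents, common_prefixes


if __name__ == "__main__":
    keys = ["Music/AC DC/Shoot to Thrill.mp3", "Music/AC DC/Thunderstruck.mp3",
            "Music/Queen/Bohemian Rhapsody.mp3"]
    print(list_objects(keys, "Music/AC DC/", "/",
                       start_after="Music/AC DC/Shoot to Thrill.mp3"))
```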
2025-05-05T09:41:59.094409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.115423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.172790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.230748Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894617519839472:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.232984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a60/r3tmp/tmpQlPKLd/pdisk_1.dat 2025-05-05T09:42:01.245576Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16947 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.335538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.335569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.335942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.337427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.337569Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.344377Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:01.348644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.406012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.417314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.756260Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894616330504130:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.756280Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a60/r3tmp/tmpFZcRhL/pdisk_1.dat 2025-05-05T09:42:01.771867Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10648 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.860853Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.860888Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.861138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.861852Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.866111Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.869056Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:01.870222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.886703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.903734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::LargeDatashardReplyRW [GOOD] >> TLocksTest::CK_Range_GoodLock [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] >> TLocksTest::BrokenNullLock [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-05-05T09:41:57.593578Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894599654420345:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.593680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmpovugPJ/pdisk_1.dat 2025-05-05T09:41:57.657720Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9172 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:57.728372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:57.728408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:57.729518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:57.730209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.739587Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.748472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:57.819997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:57.831319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmpkHN57S/pdisk_1.dat 2025-05-05T09:41:58.060530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:58.069567Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26939 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.157590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.157623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.158019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.158480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.159886Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.167720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.183479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.196941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:58.584576Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894601821814481:2132];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.589650Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmpG1moIU/pdisk_1.dat 2025-05-05T09:41:58.616584Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6191 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.705443Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.705490Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.705812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.706270Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.707323Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.716270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.734390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.745591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmplgnqE6/pdisk_1.dat 2025-05-05T09:41:59.087478Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:41:59.098135Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16262 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.186510Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.186552Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.187103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.187918Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.190956Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2 ... { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:00.832447Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.832479Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.832862Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.834516Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.835330Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:00.847802Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.876163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.893905Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.247724Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894614201843020:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.248572Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmpPZZzHT/pdisk_1.dat 2025-05-05T09:42:01.275221Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10559 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.353742Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.353790Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.354162Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.355343Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.356337Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.367133Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.382119Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.402250Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.773700Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894617432391658:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.773720Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmpoe0VBk/pdisk_1.dat 2025-05-05T09:42:01.790515Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8572 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.877730Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.877765Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.878150Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.879569Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:42:01.879781Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.891715Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.914367Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:42:01.927354Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.301531Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894620877912808:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.301556Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a83/r3tmp/tmpZ44nPP/pdisk_1.dat 2025-05-05T09:42:02.316567Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27233 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.408550Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.408585Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.408875Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.409613Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:02.412163Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.452488Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.462363Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.472819Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test.py::test[pg-tpch-q03-default.txt-Results] [GOOD] >> TLocksFatTest::RangeSetNotBreak [GOOD] >> TLocksTest::GoodSameShardLock [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-05-05T09:41:58.151706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894605146518465:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.151732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmpACOI6P/pdisk_1.dat 2025-05-05T09:41:58.211594Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25743 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.259709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.263065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:58.272428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.291080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.291106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.294959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:58.338196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:41:58.354125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.652181Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894604161028914:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.652282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmprvgIOF/pdisk_1.dat 2025-05-05T09:41:58.670349Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10107 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.759315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.759350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:58.759846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.760600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:58.767284Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.779612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.841312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.857471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmpUZxlKo/pdisk_1.dat 2025-05-05T09:41:59.162299Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:59.164493Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:32764 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.255014Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.255018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.255052Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.256402Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.264802Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.267276Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:59.268341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.284083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.296088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:59.670752Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894606979287566:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.671102Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmppIYlPM/pdisk_1.dat 2025-05-05T09:41:59.688995Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25574 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.775689Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.775714Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.776053Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: ... { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.316091Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.316136Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.316489Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.317635Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.320924Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.332853Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:01.348566Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.360905Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmpNgBiVS/pdisk_1.dat 2025-05-05T09:42:01.757461Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894618098229935:2217];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.761278Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.778492Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16267 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:01.863306Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.863346Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.863751Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.864249Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.865880Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.869871Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.888409Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.903141Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.265432Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894620298379548:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.265486Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmpw8exxJ/pdisk_1.dat 2025-05-05T09:42:02.282161Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65084 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.369547Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.369582Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.369950Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.370750Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:02.372188Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.381855Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.397885Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:42:02.411818Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.783558Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894620507519761:2205];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.783657Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a79/r3tmp/tmpOCknf0/pdisk_1.dat 2025-05-05T09:42:02.813115Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3092 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.887699Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.887728Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.888035Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.888843Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:02.890412Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.901021Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:02.922211Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.942603Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-05-05T09:41:55.375391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894589111176687:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:55.376105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aa7/r3tmp/tmpG4LSJM/pdisk_1.dat 2025-05-05T09:41:55.441734Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5435 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:55.475719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:55.475748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:55.476873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:55.513458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:55.527365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.999259Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-05-05T09:41:59.008167Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1746438118572:281474976716360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-05-05T09:41:59.009840Z node 1 :TX_PROXY ERROR: Actor# [1:7500894601996084814:5907] txid# 281474976716360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-05-05T09:41:59.009931Z node 1 :TX_PROXY ERROR: Actor# [1:7500894601996084814:5907] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-05-05T09:41:59.359549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894608053419253:2206];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000aa7/r3tmp/tmpY7V7nH/pdisk_1.dat 2025-05-05T09:41:59.368853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:41:59.374363Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14473 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.462788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.462821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.463310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.463845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.475382Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.477842Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:59.478818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.965428Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T09:42:02.969529Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-05-05T09:42:02.973395Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648)) | 2025-05-05T09:42:02.979079Z node 2 :TX_PROXY ERROR: Actor# [2:7500894620938327360:5887] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> TLocksTest::SetLockNothing [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] >> test.py::test[select-select_all_ordered-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2025-05-05T09:41:58.323555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894603583786497:2248];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.334075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpeuEHsM/pdisk_1.dat 2025-05-05T09:41:58.403705Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1487 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.468113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.468150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.468880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.471422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.480180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:58.549999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.560521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpfoYb8m/pdisk_1.dat 2025-05-05T09:41:58.813971Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:58.814230Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:25929 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:58.905967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.906008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.906311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.906975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:58.908225Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.917788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:58.943687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.960664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:59.311424Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894605657431871:2083];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.313225Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpWk2JIB/pdisk_1.dat 2025-05-05T09:41:59.332125Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6552 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.413780Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.413811Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:41:59.414196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.414736Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:59.415953Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.420334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.437311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.449785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:59.813485Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894605913429353:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.813536Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpoxUZOm/pdisk_1.dat 2025-05-05T09:41:59.836933Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3452 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:59.917684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.917723Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.918109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.920321Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:41:59.920406Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) Volatile ... ecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.484270Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.484308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.484556Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.485413Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T09:42:01.490050Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.499234Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.514260Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.528824Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpkqNrTv/pdisk_1.dat 2025-05-05T09:42:01.885382Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894614414415880:2257];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.885450Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.900288Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23432 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.985062Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.985097Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.985441Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.986061Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.986855Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.144567Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:42:02.155430Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.172217Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.394238Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894619571385892:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.394413Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpZVWnGY/pdisk_1.dat 2025-05-05T09:42:02.413236Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8069 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.498527Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.498572Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.498856Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.499927Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:02.507476Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.513969Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:02.515414Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.540025Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:02.552917Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.915380Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894620841465274:2073];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.915415Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a6e/r3tmp/tmpfBg908/pdisk_1.dat 2025-05-05T09:42:02.948703Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65201 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.000882Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.002506Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:03.011728Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.034104Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.034144Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.035237Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:03.084882Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.108156Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test.py::test[lambda-lambda_simple-default.txt-ForceBlocks] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-05-05T09:41:59.216144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894607847700510:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.216162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a58/r3tmp/tmp0aeJsh/pdisk_1.dat 2025-05-05T09:41:59.276429Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11550 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.318072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.318099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.319183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:59.348978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.353080Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:59.360012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.380660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.439721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:01.524686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a58/r3tmp/tmpEbujAS/pdisk_1.dat 2025-05-05T09:42:01.530833Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65485 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.625836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.625861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.626224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.626737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.635089Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.642353Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.643779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.658813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.669744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-05-05T09:41:58.839866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894603676630403:2199];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:58.839973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmpYqNIQO/pdisk_1.dat 2025-05-05T09:41:58.906062Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2865 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:58.980358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:58.980404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:58.981284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:58.984406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:41:58.986112Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:58.998582Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:59.000733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.067694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:41:59.083552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.319079Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894606729344940:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.319097Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmpVc4toR/pdisk_1.dat 2025-05-05T09:41:59.337558Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17811 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.422603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.422639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.423069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.423623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:59.431202Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.434137Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:41:59.435202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.495203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:59.506086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:59.823079Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894606892001038:2090];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.823116Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmp4fxVnW/pdisk_1.dat 2025-05-05T09:41:59.834081Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19130 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:59.857047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-05-05T09:41:59.860405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-05-05T09:41:59.925825Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:59.925861Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:59.927108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.032065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.044269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmpBVtFcW/pdisk_1.dat 2025-05-05T09:42:00.358647Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894612385185949:2137];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.363494Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:00.382363Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12983 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.464985Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.465021Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState ... eTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.995429Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.995475Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.995755Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.996421Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.998404Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:02.003285Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.018206Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.032140Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.393812Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894620008346691:2065];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.394161Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmpx1MJKl/pdisk_1.dat 2025-05-05T09:42:02.408256Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7457 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:02.498500Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.499833Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.499849Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.501092Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:02.502172Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.514226Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:42:02.529022Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.547819Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmp82m73M/pdisk_1.dat 2025-05-05T09:42:02.927384Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:42:02.943945Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6040 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.022519Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.022560Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.023000Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.024867Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.027892Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.036225Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.056169Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.072112Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:03.501296Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894623478231998:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:03.501329Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a68/r3tmp/tmpkX1fiX/pdisk_1.dat 2025-05-05T09:42:03.529458Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1431 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.606237Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.606277Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.606561Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.608489Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:42:03.608777Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.620607Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.636697Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.649765Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_drain.py::TestHive::test_drain_on_stop >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable >> TLocksTest::Range_EmptyKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-05-05T09:42:00.526212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894611489044886:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmpwPApl6/pdisk_1.dat 2025-05-05T09:42:00.572511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:00.597429Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4368 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-05T09:42:00.618152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.618194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.619280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.663999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:00.676665Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:42:00.679485Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.683930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.755836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:00.770388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.027892Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894616695224423:2256];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.027916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmp8vYLOT/pdisk_1.dat 2025-05-05T09:42:01.049350Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29856 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:01.134813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.134843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.135255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.135755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.137484Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:01.142547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.170115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.182770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmpC6dTs0/pdisk_1.dat 2025-05-05T09:42:01.540999Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:42:01.542292Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26712 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.632966Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.632996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... waiting... 2025-05-05T09:42:01.633421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.634987Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.635475Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:42:01.638847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:01.657970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.667771Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmp7fRbx6/pdisk_1.dat 2025-05-05T09:42:02.166408Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:42:02.176450Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3048 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.265210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.265255Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.265611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.266191Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:02.276423Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.307275Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:02.317015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmptcu7fw/pdisk_1.dat 2025-05-05T09:42:02.703507Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7500894620643691892:2214];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:02.706532Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:02.718062Z node 5 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10872 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.803329Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.803368Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.803735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:02.804636Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:02.814002Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.823133Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:02.824597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.851077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.864908Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmpTuLkqB/pdisk_1.dat 2025-05-05T09:42:03.215227Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:42:03.226442Z node 6 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5727 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.312326Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.312357Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.312634Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.313323Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.319363Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.345866Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.362162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.374443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.734868Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7500894622975281357:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:03.734893Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a36/r3tmp/tmpgApY16/pdisk_1.dat 2025-05-05T09:42:03.755441Z node 7 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16666 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.839687Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.839731Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.840081Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.840681Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.843363Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.854442Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.867325Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:42:03.880781Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[window-win_func_aggr_stat--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] >> test.py::test[join-premap_merge_extrasort1--Results] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] >> TLocksTest::Range_BrokenLock1 [GOOD] >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] >> TLocksFatTest::PointSetRemove [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-pg_from_dates--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |81.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[pg-tpch-q03-default.txt-Results] [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[pg-tpch-q21-default.txt-Results] [GOOD] >> test.py::test[produce-process_rows_sorted_desc_multi_out--ForceBlocks] >> TLocksTest::GoodNullLock [GOOD] >> test.py::test[join-join_no_correlation_in_order_by--ForceBlocks] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-05-05T09:41:59.942307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894606814138558:2058];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:59.942327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmpsCD6Me/pdisk_1.dat 2025-05-05T09:42:00.011381Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10860 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:42:00.055994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.071128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.073525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.074848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.077634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.077655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.078793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.141109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.152065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.433090Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894611732077386:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.433554Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmpJOXn2c/pdisk_1.dat 2025-05-05T09:42:00.452415Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24279 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:42:00.536467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.536497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.536804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.537892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:00.538667Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.547726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.569169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.581170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.935139Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894614009385981:2207];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.935263Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmp7OaubL/pdisk_1.dat 2025-05-05T09:42:00.947327Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14770 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:42:01.039924Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.039951Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.040333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.041067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:01.048859Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.059377Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:01.060786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.076751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.137825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmp6yl8yN/pdisk_1.dat 2025-05-05T09:42:01.456229Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894617593552465:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.456245Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.472140Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14053 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.560544Z node 4 :HIVE WARN: HIVE#72057594037968897 ... 
curityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.103347Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.103384Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.103770Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.105140Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.107572Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.137297Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.204770Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.224860Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.522440Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894625146723749:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmpCLNPbS/pdisk_1.dat 2025-05-05T09:42:03.533766Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:03.551207Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26909 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:42:03.632705Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.632738Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.633113Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.633691Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.647267Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.654166Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:03.655349Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.671081Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.686312Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.954143Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894624895810753:2137];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmpMdAKXW/pdisk_1.dat 2025-05-05T09:42:03.959542Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:03.979657Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29104 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:42:04.058583Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.058615Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:04.059036Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.059586Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:04.060243Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.069206Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.085004Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.098068Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.468329Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894627150779908:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:04.468400Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a53/r3tmp/tmppAVKpe/pdisk_1.dat 2025-05-05T09:42:04.486219Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63645 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-05-05T09:42:04.572220Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.572251Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:04.572631Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:04.574630Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:04.574947Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.589042Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.609598Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.623666Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test.py::test[produce-process_rows_sorted_desc_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-ForceBlocks] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint8--Results] >> test.py::test[like-ilike_clause-default.txt-Results] >> test.py::test[produce-reduce_all_list-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_by_struct-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-05-05T09:42:00.063456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894613392682092:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.063596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmpXh2xKP/pdisk_1.dat 2025-05-05T09:42:00.137185Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:31083 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-05-05T09:42:00.164785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.164811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-05T09:42:00.165893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.211855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.214720Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.217948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.240212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:00.252301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.543214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894611926338322:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.543234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmpaXWlcC/pdisk_1.dat 2025-05-05T09:42:00.554176Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7247 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:00.649179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.649204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.649484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.651430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.651490Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.661996Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.663212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.680887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.703861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmp7atOaY/pdisk_1.dat 2025-05-05T09:42:01.046234Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894615346454617:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.046259Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.062087Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15945 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.149842Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.149871Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.150195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.150820Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:42:01.152142Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T09:42:01.161220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.180149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.193782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.544681Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894616894784009:2074];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.544956Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmpj8srBR/pdisk_1.dat 2025-05-05T09:42:01.563497Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26257 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.649667Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.649722Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState ... SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.467277Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.467312Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.469682Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:03.479574Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.491274Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.535092Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:03.541617Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.569273Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.580419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:03.870904Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894626681379379:2204];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:03.871111Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmpaDBIQR/pdisk_1.dat 2025-05-05T09:42:03.886298Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8675 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.975045Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.975085Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.975622Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.976233Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.998181Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:04.008717Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.029074Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.040062Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmpE9R9Ed/pdisk_1.dat 2025-05-05T09:42:04.386460Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894628760447780:2154];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:04.397321Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:04.417951Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11768 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:04.489825Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.489861Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:04.490156Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:04.491094Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:04.493423Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.510359Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:04.511515Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:04.525384Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:42:04.541453Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:04.882512Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894627398971204:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:04.882540Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a48/r3tmp/tmp9t0285/pdisk_1.dat 2025-05-05T09:42:04.905585Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65505 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:04.991361Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.991398Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:04.991830Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.992504Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:04.999536Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:05.015732Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-05-05T09:42:05.030319Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 |81.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> TObjectStorageListingTest::SchemaChecks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [FAIL] Test command err: 2025-05-05T09:42:00.821373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894610813976159:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.821557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009f7/r3tmp/tmpxf9dJi/pdisk_1.dat 2025-05-05T09:42:00.881570Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17876, node 1 2025-05-05T09:42:00.904342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:42:00.904356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:42:00.904359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:42:00.904402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20930 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-05-05T09:42:00.922532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.922555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-05-05T09:42:00.923641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:00.956041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:00.959299Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:00.983458Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:00.986067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009f7/r3tmp/tmpVcH978/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10756, node 2 TClient is connected to server localhost:63219 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... equal assertion failed at ydb/core/client/object_storage_listing_ut.cpp:1075, virtual void NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TTestCaseTestSkipShards::Execute_(NUnitTest::TTestContext &): 2 == count TBackTrace::Capture()+28 (0x1360AEFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x1377BD29) NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TTestCaseTestSkipShards::Execute_(NUnitTest::TTestContext&)+19498 (0x13504C8A) NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1350CE77) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1377DBDE) NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TCurrentTest::Execute()+481 (0x1350C811) NUnitTest::TTestFactory::Execute()+803 (0x1377E353) NUnitTest::RunMain(int, char**)+3021 (0x1378FF0D) ??+0 (0x7F07D3A02D90) __libc_start_main+128 (0x7F07D3A02E40) _start+41 (0x125DA029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-05-05T09:42:01.124987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894616517361618:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:01.125070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009e0/r3tmp/tmp2lJQpk/pdisk_1.dat 2025-05-05T09:42:01.190766Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29921 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.261643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.261665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.262334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.266205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.266828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:01.276999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T09:42:01.302249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.312180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.332366Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894625063459509:2203];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:03.343327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009e0/r3tmp/tmpZsCbS0/pdisk_1.dat 2025-05-05T09:42:03.383550Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17725 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:42:03.455301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.455333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.455738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.456901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:03.457450Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.469111Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:03.484946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.497360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.510314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.438532Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894628352303010:2061];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:04.438556Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/0009e0/r3tmp/tmpQErsIz/pdisk_1.dat 2025-05-05T09:42:04.454121Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64286 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:04.543066Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.543108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:04.543461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:04.544097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:04.547191Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.552381Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:04.555481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:04.567855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T09:42:04.580809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-05-05T09:42:00.707150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894610361168609:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:00.707361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmpAWMGb5/pdisk_1.dat 2025-05-05T09:42:00.767459Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6057 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:00.808626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:00.808659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:00.809876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:00.842348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:42:00.847885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.910022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:00.919276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.183414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894614589214464:2137];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmpZRaGQX/pdisk_1.dat 2025-05-05T09:42:01.187828Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.198881Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4934 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.289025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.289061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.289474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.291051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.295226Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.303933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.323228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.334692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.699728Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7500894616132475930:2149];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmp00c4fn/pdisk_1.dat 2025-05-05T09:42:01.708673Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:01.723397Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13307 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:01.803045Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:01.803078Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:01.803281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.804057Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:01.805882Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.814880Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-05-05T09:42:01.816112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.829919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:01.843319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:02.164975Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7500894618828728929:2137];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmpHCQsHp/pdisk_1.dat 2025-05-05T09:42:02.180839Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:02.182386Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8698 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:02.268854Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:02.268890Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:02.269211Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subopera ... PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:03.849608Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:03.849635Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:03.849946Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:03.850476Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:03.851289Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.858615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.874515Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:03.888149Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:04.286462Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7500894627137262399:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:04.286501Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmpdvcyh3/pdisk_1.dat 2025-05-05T09:42:04.303102Z node 8 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5307 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:04.391133Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.391171Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-05-05T09:42:04.391553Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:04.394543Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:04.400326Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.404526Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.420708Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.433138Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:04.782698Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7500894630255460576:2066];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:42:04.783016Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmpBnhnE6/pdisk_1.dat 2025-05-05T09:42:04.799201Z node 9 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22610 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:04.887240Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:04.887281Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:04.887302Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.888526Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:04.894467Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.911004Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:04.927271Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:42:05.272403Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7500894634333554254:2233];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000a07/r3tmp/tmpMqOoVG/pdisk_1.dat 2025-05-05T09:42:05.281375Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-05-05T09:42:05.290369Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11451 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:05.377323Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:05.377358Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:05.377780Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:42:05.378336Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:42:05.380031Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:05.392198Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:05.414415Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:05.428915Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
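The TObjectStorageListingTest::TestSkipShards failure reported above ("equal assertion failed at ydb/core/client/object_storage_listing_ut.cpp:1075 ... 2 == count") comes out of the Arcadia unittest framework: the backtrace symbols NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TTestCaseTestSkipShards::Execute_ are what the Y_UNIT_TEST_SUITE / Y_UNIT_TEST macros expand to, and the fact that the message carries only the expression text ("2 == count") rather than the runtime values is characteristic of UNIT_ASSERT_EQUAL (UNIT_ASSERT_VALUES_EQUAL would also print both values). A minimal sketch of that shape follows; the variable name and the elided setup are illustrative assumptions, not the actual contents of object_storage_listing_ut.cpp:

    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST_SUITE(TObjectStorageListingTest) {
        Y_UNIT_TEST(TestSkipShards) {
            // ... start the test cluster, create and fill the table, issue the
            //     object-storage listing request (omitted in this sketch) ...

            size_t count = 0;   // illustrative: e.g. how many shards the listing visited

            // On failure this reports "equal assertion failed at <file:line>, <test>: 2 == count",
            // matching the message in the log above; only the expression text is shown.
            UNIT_ASSERT_EQUAL(2, count);

            // UNIT_ASSERT_VALUES_EQUAL(2, count) would additionally include the actual
            // values in the failure message, which makes such CI failures easier to read.
        }
    }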
|81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-05-05T09:41:51.270694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894574448949604:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:51.270737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af2/r3tmp/tmpHDUHax/pdisk_1.dat 2025-05-05T09:41:51.348442Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11774, node 1 2025-05-05T09:41:51.361439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:41:51.361454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:41:51.361457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:41:51.361522Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-05T09:41:51.375394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:51.375423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:51.376519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7689 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:51.416673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:51.427414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:56.270983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7500894574448949604:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:56.271090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000af2/r3tmp/tmpwm8iby/pdisk_1.dat 2025-05-05T09:42:05.621833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T09:42:05.629523Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27770, node 2 2025-05-05T09:42:05.667205Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:42:05.667220Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:42:05.667222Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:42:05.667266Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25193 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:42:05.703271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:42:05.703298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:42:05.703675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:05.707398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:42:05.711178Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:42:05.759900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> test.py::test[aggregate-avg_and_sum-default.txt-Results] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] >> test.py::test[in-in_sorted_by_tuple--Results] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[join-join_no_correlation_in_order_by--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] [GOOD] >> test.py::test[aggregate-no_compact_distinct--ForceBlocks] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test.py::test[blocks-interval_div_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_div_scalar--Results] >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] >> test.py::test[join-join_no_correlation_in_order_by-off-ForceBlocks] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test.py::test[like-like_clause-default.txt-ForceBlocks] >> test.py::test[aggregate-no_compact_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-no_compact_distinct--Results] [SKIPPED] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-ForceBlocks] >> test_disposition.py::TestContinueMode::test_disposition_time_ago[v1-mvp_external_ydb_endpoint0] >> test.py::test[lambda-lambda_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> test.py::test[coalesce-coalesce_few_opt--Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-Results] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Results] [SKIPPED] >> test.py::test[column_group-length-perusage-Results] [SKIPPED] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert--Results] >> test.py::test[blocks-pg_from_dates--Results] [GOOD] >> test.py::test[blocks-sort_one_desc--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-ForceBlocks] >> test.py::test[blocks-interval_div_scalar--Results] [GOOD] >> test.py::test[blocks-minmax_tuple--ForceBlocks] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_inner-off-ForceBlocks] >> YdbSdkSessionsPool::PeriodicTask/0 >> 
test.py::test[window-win_func_first_last_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Results] >> test.py::test[produce-reduce_by_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Results] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] >> YdbSdkSessionsPool1Session::RunSmallPlan/0 |81.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/0 >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--ForceBlocks] |81.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> YdbSdkSessionsPool::WaitQueue/0 >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> YdbSdkSessionsPool::StressTestSync/0 >> YdbSdkSessionsPool::WaitQueue/1 >> test.py::test[ansi_idents-escaped_udf_name-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] >> YdbSdkSessionsPool::StressTestSync/1 >> YdbSdkSessionsPool1Session::CustomPlan/0 >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] [GOOD] >> test.py::test[order_by-limit--ForceBlocks] >> test.py::test[like-like_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-ForceBlocks] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[produce-reduce_by_struct-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python--Results] [SKIPPED] >> test.py::test[ql_filter-integer_select_other--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_null--ForceBlocks] >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test.py::test[blocks-bitcast_scalar--Results] [GOOD] >> test.py::test[blocks-boolean_ops--Results] >> test.py::test[window-win_func_first_last_with_part--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] >> test.py::test[ansi_idents-escaped_udf_name-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-ForceBlocks] >> test.py::test[blocks-sort_one_desc--Results] [GOOD] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-ForceBlocks] >> test.py::test[limit-limit_skip_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Results] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_delay--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_zero_delay--Results] >> test.py::test[aggregate-group_by_hop_zero_delay--Results] [SKIPPED] >> test.py::test[join-premap_merge_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_inner-off-Results] >> test.py::test[join-premap_merge_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |82.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[blocks-sort_one_desc--Results] [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--ForceBlocks] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |82.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[aggregate-group_by_hop_zero_delay--Results] [SKIPPED] >> test.py::test[limit-limit_skip_take-default.txt-Results] [GOOD] >> test.py::test[lineage-list_literal3-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[column_order-insert--Results] [GOOD] >> test.py::test[lineage-topsort-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset--ForceBlocks] >> test.py::test[ql_filter-integer_select_other--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_select_other--Results] >> test.py::test[order_by-limit--ForceBlocks] [GOOD] >> test.py::test[order_by-limit--Results] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> 
test_auditlog.py::test_dynconfig >> test.py::test[blocks-minmax_tuple--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_tuple--Results] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-ForceBlocks] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[column_order-insert--Results] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> test.py::test[join-left_join_right_pushdown_null--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_null--Results] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] >> test.py::test[ql_filter-integer_select_other--Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-ForceBlocks] >> test.py::test[blocks-boolean_ops--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter_opt--Results] >> test.py::test[limit-empty_read_after_limit-default.txt-ForceBlocks] [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[limit-empty_read_after_limit-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] >> test.py::test[order_by-limit--Results] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-ForceBlocks] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] >> test.py::test[window-win_func_in_lib--Results] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Results] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-ForceBlocks] >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test.py::test[blocks-minmax_tuple--Results] [GOOD] >> test.py::test[blocks-pg--ForceBlocks] >> test.py::test[optimizers-combinebykey_fields_subset--ForceBlocks] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset--Results] |82.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/audit/py3test >> test.py::test[join-left_join_right_pushdown_null--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-on-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[tpch-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--Results] >> test.py::test[sampling-mapjoin_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] >> test.py::test[blocks-combine_all_max_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] >> test.py::test[optimizers-combinebykey_fields_subset--Results] [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[optimizers-keepworld_emptyflatmap--ForceBlocks] >> test.py::test[pg-join_using_tables2-default.txt-ForceBlocks] [GOOD] |82.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[join-left_join_right_pushdown_null--Results] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Results] |82.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[tpch-q8-default.txt-ForceBlocks] >> test.py::test[window-win_func_in_lib--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Results] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-topsort-default.txt-ForceBlocks] >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue |82.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> test.py::test[blocks-pg--ForceBlocks] [GOOD] >> test.py::test[blocks-pg--Results] >> TFlatTest::SplitEmptyTwice [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Results] [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[pg-tpcds-q04-default.txt-ForceBlocks] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> test.py::test[insert-insert_from_other--Results] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[optimizers-keepworld_emptyflatmap--ForceBlocks] [GOOD] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] >> test.py::test[blocks-pg--Results] [GOOD] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-05-05T09:41:53.015154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894580872958790:2209];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:53.015223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ae6/r3tmp/tmpFXlK7R/pdisk_1.dat 2025-05-05T09:41:53.081733Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32501 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-05-05T09:41:53.115377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:53.115402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:53.116526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-05-05T09:41:53.157640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-05-05T09:41:53.170061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:53.242006Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-05-05T09:41:53.242461Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-05-05T09:41:53.247795Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-05-05T09:41:53.248375Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438113273 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438113273 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-05-05T09:41:53.545580Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.40, eph 1} end=0, 2 blobs 435r (max 435), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (125948 0 0)b }, ecr=1.000 2025-05-05T09:41:53.545699Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.41, eph 1} end=0, 2 blobs 1305r (max 1305), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (85573 0 0)b }, ecr=1.000 2025-05-05T09:41:53.555395Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.56, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.001s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-05T09:41:53.555470Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.57, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.001s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-05T09:41:53.556964Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.58, eph 1} end=0, 2 blobs 570r (max 570), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (36363 0 0)b }, ecr=1.000 2025-05-05T09:41:53.558183Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.59, eph 1} end=0, 2 blobs 1707r (max 1707), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (117272 0 0)b }, ecr=1.000 2025-05-05T09:41:53.571203Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.615, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-05T09:41:53.585708Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.616, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-05T09:41:53.592629Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.82, eph 2} end=0, 2 blobs 687r (max 688), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (198722 0 0)b }, ecr=1.000 2025-05-05T09:41:53.595465Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.83, eph 2} end=0, 2 blobs 2061r (max 2064), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (135091 0 0)b }, ecr=1.000 2025-05-05T09:41:53.607440Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.104, eph 3} end=0, 2 blobs 938r (max 939), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (271254 0 0)b }, ecr=1.000 2025-05-05T09:41:53.612842Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.105, eph 3} end=0, 2 blobs 2814r (max 2817), put Spent{time=0.009s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (184414 0 0)b }, ecr=1.000 2025-05-05T09:41:53.617137Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.122, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-05T09:41:53.617279Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.123, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-05T09:41:53.621772Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.124, eph 2} end=0, 2 blobs 1082r (max 1082), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (68819 0 0)b }, ecr=1.000 2025-05-05T09:41:53.621892Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.121, eph 2} end=0, 2 blobs 3243r (max 3243), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (222602 0 0)b }, ecr=1.000 
2025-05-05T09:41:53.627107Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1127, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.005s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-05T09:41:53.636653Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1126, eph 2} end=0, 2 blobs 10001r (max 10506), put Spent{time=0.015s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-05T09:41:53.636890Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.141, eph 4} end=0, 2 blobs 1195r (max 1196), put Spent{time=0.013s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (345463 0 0)b }, ecr=1.000 2025-05-05T09:41:53.638044Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.142, eph 4} end=0, 2 blobs 3585r (max 3588), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (234907 0 0)b }, ecr=1.000 2025-05-05T09:41:53.647061Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.165, eph 5} end=0, 2 blobs 4338r (max 4341), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (284230 0 0)b }, ecr=1.000 2025-05-05T09:41:53.647101Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.164, eph 5} end=0, 2 blobs 1446r (max 1447), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (417995 0 0)b }, ecr=1.000 2025-05-05T09:41:53.650876Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.178, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-05-05T09:41:53.650960Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.179, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-05-05T09:41:53.654418Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.180, eph 3} end=0, 2 blobs 1588r (max 1588), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (100947 0 0)b }, ecr=1.000 2025-05-05T09:41:53.654554Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1634, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-05-05T09:41:53.657167Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.177, eph 3} end=0, 2 blobs 4761r (max 4761), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (326771 0 0)b }, ecr=1.000 2025-05-05T09:41:53.669547Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1633, eph 3} end=0, 2 blobs 10001r (max 10502), put Spent{time=0.015s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-05-05T09:41:53.671152Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.197, eph 6 ... 
ARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.677036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715690:0, at schemeshard: 72057594046644480 2025-05-05T09:42:22.677039Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715690:0 ProgressState, at schemeshard: 72057594046644480 2025-05-05T09:42:22.677129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9 2025-05-05T09:42:22.677165Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-05-05T09:42:22.677172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-05-05T09:42:22.677176Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-05-05T09:42:22.677178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-05-05T09:42:22.677181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715690, ready parts: 1/1, is published: true 2025-05-05T09:42:22.677195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7500894708081508527:2416] message: TxId: 281474976715690 2025-05-05T09:42:22.677197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-05-05T09:42:22.677201Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715690:0 2025-05-05T09:42:22.677203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715690:0 2025-05-05T09:42:22.677246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-05-05T09:42:22.678226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-05-05T09:42:22.678294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-05-05T09:42:22.678382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-05-05T09:42:22.678388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-05-05T09:42:22.679467Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-05-05T09:42:22.680098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894708081508136 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-05-05T09:42:22.680127Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:42:22.680204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, 
message: Source { RawX1: 7500894708081508142 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-05-05T09:42:22.680214Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:42:22.680232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894708081508313 RawX2: 4503608217307464 } TabletId: 72075186224037893 State: 4 2025-05-05T09:42:22.680235Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:42:22.680249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894708081508315 RawX2: 4503608217307465 } TabletId: 72075186224037895 State: 4 2025-05-05T09:42:22.680253Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:42:22.680267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894708081507832 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-05-05T09:42:22.680270Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:42:22.680284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7500894708081508317 RawX2: 4503608217307466 } TabletId: 72075186224037894 State: 4 2025-05-05T09:42:22.680288Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-05-05T09:42:22.680331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.680352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.680367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.680379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.680387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.680393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-05-05T09:42:22.681275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-05-05T09:42:22.681355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-05-05T09:42:22.681419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at 
schemeshard: 72057594046644480 2025-05-05T09:42:22.681447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-05-05T09:42:22.681473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-05-05T09:42:22.681513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-05-05T09:42:22.681539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-05-05T09:42:22.681557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-05-05T09:42:22.681582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-05-05T09:42:22.681604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-05-05T09:42:22.681638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-05-05T09:42:22.681661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-05-05T09:42:22.681696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-05-05T09:42:22.681705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-05-05T09:42:22.681729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-05-05T09:42:22.681739Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-05-05T09:42:22.681750Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-05-05T09:42:22.681752Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-05-05T09:42:22.681754Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-05-05T09:42:22.681756Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-05-05T09:42:22.681758Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-05-05T09:42:22.681815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-05-05T09:42:22.681825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-05-05T09:42:22.681835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046644480:4 2025-05-05T09:42:22.681837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-05-05T09:42:22.681920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-05-05T09:42:22.681931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-05-05T09:42:22.681940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-05-05T09:42:22.681941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-05-05T09:42:22.681950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-05-05T09:42:22.681951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-05-05T09:42:22.682046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-05-05T09:42:22.682056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-05-05T09:42:22.682088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] [GOOD] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[lineage-join_as_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Results] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] |82.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[blocks-pg--Results] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-ForceBlocks] |82.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> test.py::test[sampling-topsort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-topsort-default.txt-Results] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] >> test.py::test[insert-insert_from_other--Results] [GOOD] >> test.py::test[insert-override_view_fail--Results] >> test.py::test[pg-tpcds-q04-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-Results] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[insert-override_view_fail--Results] [GOOD] >> test.py::test[insert-replace_inferred--Results] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] >> test.py::test[pg-tpcds-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] >> test.py::test[tpch-q8-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q8-default.txt-Results] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[blocks-combine_hashed_count_filter--Results] [GOOD] >> test.py::test[blocks-date_equals--Results] |82.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> 
test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-ForceBlocks] |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010e6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt 2025-05-05T09:42:20.453805Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[tpch-q8-default.txt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--ForceBlocks] >> test.py::test[select-create_structures-default.txt-ForceBlocks] >> test.py::test[join-equi_join_two_mult_keys-off-Results] >> test.py::test[join-equi_join_two_mult_keys-off-Results] [SKIPPED] >> test.py::test[join-full_equal_null-off-Results] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off-Results] [SKIPPED] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] >> test.py::test[pg-tpcds-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010e3/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt 2025-05-05T09:42:22.516420Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n 
","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test_auditlog.py::test_dml_begin_commit_logged >> test.py::test[pg-tpcds-q13-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-ForceBlocks] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> YdbSdkSessions::TestMultipleSessions |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] >> YdbSdkSessions::MultiThreadSync >> test.py::test[insert-trivial_literals-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010e0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt 2025-05-05T09:42:23.216267Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010e2/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt 2025-05-05T09:42:23.411493Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:23.411475Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T09:42:23.385035Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessions::MultiThreadSync [GOOD] >> 
YdbSdkSessions::SessionsServerLimit [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> test.py::test[type_v3-ignore_v3_pragma--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Results] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> test.py::test[select-create_structures-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-create_structures-default.txt-Results] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--ForceBlocks] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> test.py::test[join-cbo_4tables_only_sorted_merge--Results] [SKIPPED] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-inner_all_right--Results] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient |82.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] [GOOD] >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_bad_interval--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] [GOOD] >> test.py::test[window-win_func_special--Results] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:195: Test is failing right now |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> test.py::test[select-create_structures-default.txt-Results] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-ForceBlocks] >> test.py::test[type_v3-ignore_v3_pragma--Results] [GOOD] >> test.py::test[view-secure--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:543: Enable after accepting a pull request with merging configs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/kpz1/0010da/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-05-05T09:42:25.482755Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:25.482741Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-05-05T09:42:25.467370Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:243: Test is failing right now >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] [GOOD] >> test.py::test[join-join_without_column-off-Results] [SKIPPED] >> test.py::test[join-left_cast_to_string--Results] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[type_v3-type_subset--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-type_subset--Results] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:583: Enable after accepting a pull request with merging configs >> test.py::test[pg-tpcds-q35-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-Results] >> test.py::test[view-secure--ForceBlocks] [GOOD] >> test.py::test[view-secure--Results] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--ForceBlocks] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-ForceBlocks] |82.9%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[join-inner_with_select--Results] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[select-dot_in_alias-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Results] >> test.py::test[blocks-date_equals--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/test-results/pytest/{meta.json ... 
results_accumulator.log} |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> test.py::test[window-win_func_special--Results] [GOOD] >> test.py::test[window-win_inline_spec-default.txt-Results] |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--ForceBlocks] [SKIPPED] >> test.py::test[weak_field-weak_field_in_group_by--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Results] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[select-substring-default.txt-ForceBlocks] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--ForceBlocks] >> test.py::test[join-left_cast_to_string--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_null--Results] |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Results] |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[weak_field-weak_field_in_group_by--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--ForceBlocks] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] >> 
test.py::test[pg-tpcds-q39-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-ForceBlocks] |83.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part14/test-results/pytest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010c2/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt 2025-05-05T09:42:31.881615Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010c1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt 2025-05-05T09:42:31.995484Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test.py::test[join-inner_with_select--Results] [GOOD] >> test.py::test[join-inner_with_select-off-Results] [SKIPPED] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[select-substring-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-substring-default.txt-Results] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[view-view_with_lambda--ForceBlocks] [GOOD] >> test.py::test[view-view_with_lambda--Results] >> TObjectStorageListingTest::ManyDeletes [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] >> test.py::test[order_by-SortByTwoFieldsDesc--ForceBlocks] [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-win_inline_spec-default.txt-Results] [GOOD] >> test.py::test[ypath-empty_range--Results] [SKIPPED] >> test.py::test[ypath-limit_with_key-default.txt-Results] 
>> test.py::test[select-substring-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-05-05T09:41:50.369894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894571112643194:2067];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:50.370177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b01/r3tmp/tmpBmAgf7/pdisk_1.dat 2025-05-05T09:41:50.429818Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18246, node 1 2025-05-05T09:41:50.451129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:41:50.451145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:41:50.451147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:41:50.451191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63494 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:50.502245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:50.502265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:50.503105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:50.503221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:50.533207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:41:52.461818Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500894577904363883:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:52.461874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000b01/r3tmp/tmpuAbzKU/pdisk_1.dat 2025-05-05T09:41:52.479376Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19446, node 2 2025-05-05T09:41:52.490405Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:41:52.490417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:41:52.490419Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:41:52.490546Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6043 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-05-05T09:41:52.565574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:52.565609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:52.565991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:52.566628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-05-05T09:41:52.581816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 ....2025-05-05T09:41:57.462252Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7500894577904363883:2064];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:57.462348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; ....2025-05-05T09:42:07.476275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-05T09:42:07.476333Z node 2 :IMPORT WARN: Table profiles were not loaded .. 
2025-05-05T09:42:17.458582Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-05-05T09:42:17.458626Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-05-05T09:42:17.458899Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037890 2025-05-05T09:42:17.458948Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037889 2025-05-05T09:42:17.459124Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-05-05T09:42:17.459128Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-05-05T09:42:17.459251Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037891 2025-05-05T09:42:17.459272Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976719700 at tablet 72075186224037892 2025-05-05T09:42:17.460018Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-05-05T09:42:17.460068Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-05-05T09:42:17.460514Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-05-05T09:42:17.460529Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-05-05T09:42:17.462738Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746438137507 at tablet 72075186224037889 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746438137507 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-05-05T09:42:17.462757Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T09:42:17.462764Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746438137507 at tablet 72075186224037892 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746438137507 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-05-05T09:42:17.462774Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-05-05T09:42:17.462807Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T09:42:17.462827Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-05-05T09:42:17.462838Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746438137507:281474976719700] in PlanQueue unit at 72075186224037889 2025-05-05T09:42:17.462839Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-05-05T09:42:17.462844Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-05-05T09:42:17.462851Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746438137507:281474976719700] in PlanQueue unit at 72075186224037892 2025-05-05T09:42:17.462857Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1746438137507:281474976719700 2025-05-05T09:42:17.462866Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1746438137507:281474976719700 2025-05-05T09:42:17.463559Z node 2 :TX_DATASHARD DEBUG: tx 281474976719700 released its data 2025-05-05T09:42:17.463560Z node 2 :TX_DATASHARD DEBUG: tx 281474976719700 released its data 
2025-05-05T09:42:17.463570Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:17.463575Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:17.463707Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746438137507 at tablet 72075186224037890 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746438137507 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-05-05T09:42:17.463721Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T09:42:17.463734Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976719700 at step 1746438137507 at tablet 72075186224037891 { Transactions { TxId: 281474976719700 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746438137507 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-05-05T09:42:17.463737Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-05T09:42:17.463744Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T09:42:17.463747Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-05-05T09:42:17.463752Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746438137507:281474976719700] in PlanQueue unit at 72075186224037890 2025-05-05T09:42:17.463761Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 720751 ... TASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-05-05T09:42:41.723228Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746438161769:281474976721711] in PlanQueue unit at 72075186224037892 2025-05-05T09:42:41.723234Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1746438161769:281474976721711 2025-05-05T09:42:41.723423Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T09:42:41.723433Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:41.723484Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976721711 at step 1746438161769 at tablet 72075186224037891 { Transactions { TxId: 281474976721711 AckTo { RawX1: 0 RawX2: 0 } } Step: 1746438161769 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-05-05T09:42:41.723491Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-05T09:42:41.723508Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-05-05T09:42:41.723516Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-05-05T09:42:41.723518Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1746438161769:281474976721711] in PlanQueue unit at 72075186224037891 2025-05-05T09:42:41.723522Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1746438161769:281474976721711 2025-05-05T09:42:41.723681Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T09:42:41.723743Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T09:42:41.723892Z node 2 :TX_DATASHARD DEBUG: tx 
281474976721711 at 72075186224037890 restored its data 2025-05-05T09:42:41.724049Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T09:42:41.724058Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:41.724086Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1746438161769} 2025-05-05T09:42:41.724183Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T09:42:41.724274Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037889 restored its data 2025-05-05T09:42:41.724592Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T09:42:41.724604Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:41.724786Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1746438161769} 2025-05-05T09:42:41.724829Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-05-05T09:42:41.724945Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037892 restored its data 2025-05-05T09:42:41.725106Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T09:42:41.725116Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:41.725142Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-05-05T09:42:41.725222Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037890 restored its data 2025-05-05T09:42:41.725365Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T09:42:41.725618Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1746438161769} 2025-05-05T09:42:41.725656Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1746438161769} 2025-05-05T09:42:41.725663Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-05-05T09:42:41.725681Z node 2 :TX_DATASHARD DEBUG: Complete [1746438161769 : 281474976721711] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7500894788357827931:17801], exec latency: 0 ms, propose latency: 2 ms 2025-05-05T09:42:41.725693Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-05-05T09:42:41.725821Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T09:42:41.725920Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037889 restored its data 2025-05-05T09:42:41.726965Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-05-05T09:42:41.727110Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037892 restored its data 2025-05-05T09:42:41.727284Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T09:42:41.727446Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-05-05T09:42:41.727469Z node 2 :TX_DATASHARD DEBUG: Complete [1746438161769 : 281474976721711] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7500894788357827931:17801], exec latency: 3 ms, 
propose latency: 5 ms 2025-05-05T09:42:41.727483Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-05-05T09:42:41.727531Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 released its data 2025-05-05T09:42:41.727535Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-05-05T09:42:41.728250Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-05-05T09:42:41.728276Z node 2 :TX_DATASHARD DEBUG: Complete [1746438161769 : 281474976721711] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7500894788357827931:17801], exec latency: 3 ms, propose latency: 5 ms 2025-05-05T09:42:41.728288Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-05-05T09:42:41.741548Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-05-05T09:42:41.741887Z node 2 :TX_DATASHARD DEBUG: tx 281474976721711 at 72075186224037889 restored its data 2025-05-05T09:42:41.748275Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-05-05T09:42:41.752350Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-05-05T09:42:41.752394Z node 2 :TX_DATASHARD DEBUG: Complete [1746438161769 : 281474976721711] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7500894788357827931:17801], exec latency: 25 ms, propose latency: 29 ms 2025-05-05T09:42:41.752412Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-05-05T09:42:41.768508Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.768776Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.768912Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769002Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769047Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-05-05T09:42:41.769123Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, 
value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769145Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769220Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769310Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769374Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-05-05T09:42:41.769435Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-05-05T09:42:41.769494Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-05-05T09:42:41.769537Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-05-05T09:42:41.769558Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010af/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-05-05T09:42:33.623654Z: 
{"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:33.623639Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-05-05T09:42:33.609757Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[view-view_with_lambda--Results] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[order_by-SortByTwoFieldsDesc--ForceBlocks] [GOOD] |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> test.py::test[pg-tpcds-q75-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:534: Enable after interactive tx support |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TFlatTest::AutoSplitMergeQueue [GOOD] >> test.py::test[window-empty/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-ForceBlocks] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[ypath-limit_with_key-default.txt-Results] [GOOD] >> 
test.py::test[ypath-multi_range-default.txt-Results] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-left_join_right_pushdown_null--Results] [GOOD] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-05-05T09:41:54.213334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500894586059693640:2201];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:41:54.214174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ab9/r3tmp/tmpdgkOSb/pdisk_1.dat 2025-05-05T09:41:54.289193Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19647 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-05-05T09:41:54.357065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:41:54.357093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:41:54.358038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T09:41:54.359640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-05-05T09:41:54.363252Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:41:54.384056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438114491 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) A-0 B-0 2025-05-05T09:41:54.650736Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T09:41:54.653568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-05-05T09:41:54.658420Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T09:41:54.658908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-05-05T09:41:54.753561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T09:41:54.753629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-05-05T09:41:54.753655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2025-05-05T09:41:54.753697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T09:41:54.754035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-05-05T09:41:54.811715Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T09:41:54.812511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-05-05T09:41:54.821453Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2025-05-05T09:41:54.822974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId 
[OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-05-05T09:41:54.912857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T09:41:54.912916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-05-05T09:41:54.912940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2025-05-05T09:41:54.912979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T09:41:54.913400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-05-05T09:41:54.980298Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=0, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T09:41:54.980766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-05-05T09:41:54.991859Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874656 0 0)b }, ecr=1.000 2025-05-05T09:41:54.992937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-05-05T09:41:55.080734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T09:41:55.080780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-05-05T09:41:55.080803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874506 2025-05-05T09:41:55.080845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T09:41:55.081220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2025-05-05T09:41:55.124648Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=0, 2 blobs 1r (max 1), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T09:41:55.127857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166008 rowCount 4 cpuUsage 0 2025-05-05T09:41:55.150661Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 4} end=0, 5 blobs 4r (max 4), put Spent{time=0.023s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (25166202 0 0)b }, ecr=1.000 2025-05-05T09:41:55.151392Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 25166024 rowCount 4 cpuUsage 0 2025-05-05T09:41:55.228142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-05-05T09:41:55.228190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 25166024 row count 4 2025-05-05T09:41:55.228216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0, RowCount 4, DataSize 25166024 2025-05-05T09:41:55.228265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186224037888 2025-05-05T09:41:55.228627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-2 2025-05-05T09:41:55.257435Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 5} end=0, 2 blobs 1r (max 1), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-05-05T09:41:55.260926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 31457526 rowCount 5 cpuUsage 0 2025-05-05T09:41:55.305728Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.31, eph 5} end=0, 5 blobs 5r (max 5), put Spent{time=0.047s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31457748 0 0)b }, ecr=1.000 2025-05-05T09:41:55.308136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 720 ... PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) 2025-05-05T09:42:37.588990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-05-05T09:42:37.589014Z node 3 :IMPORT WARN: Table profiles were not loaded TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 6 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumn... (TRUNCATED) 2025-05-05T09:42:39.141734Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-05-05T09:42:39.141749Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-05-05T09:42:39.141751Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-05-05T09:42:39.141802Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-05-05T09:42:39.243045Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037898 not found 2025-05-05T09:42:39.243192Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037900 not found 2025-05-05T09:42:39.243201Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037899 not found 2025-05-05T09:42:39.243537Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 10 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-05-05T09:42:44.348787Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037902 not found 2025-05-05T09:42:44.348805Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037901 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 11 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1746438142757 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 11 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) >> test.py::test[join-left_only_with_other--Results] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> test.py::test[select-table_content_from_double_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001095/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 2025-05-05T09:42:37.896650Z: {"tx_id":"01jtfx6xj831vxdzch7a8hf1qh","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:37.896641Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-05-05T09:42:37.896397Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-05-05T09:42:37.921677Z: {"tx_id":"01jtfx6xj831vxdzch7a8hf1qh","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:37.921666Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T09:42:37.899963Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:37.926852Z: {"tx_id":"01jtfx6xj831vxdzch7a8hf1qh","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:37.926839Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-05-05T09:42:37.925294Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |83.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/test-results/pytest/{meta.json ... 
results_accumulator.log} |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-ForceBlocks] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[pg-tpcds-q93-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[select-table_content_from_double_opt-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] >> test.py::test[ypath-multi_range-default.txt-Results] [GOOD] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[join-join_comp_common_table--Results] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[produce-process_sorted_desc_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_streaming-default.txt-ForceBlocks] |83.8%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[blocks-date_greater_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-interval_add_date--Results] >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] |83.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[ypath-multi_range-default.txt-Results] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001091/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt 2025-05-05T09:42:40.505416Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:40.505404Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-05-05T09:42:40.490124Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |83.9%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test.py::test[join-left_only_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected--Results] |84.0%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part14/test-results/pytest/{meta.json ... 
results_accumulator.log} |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test.py::test[window-row_number_to_map_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] >> test.py::test[produce-process_streaming-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/00107b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-05-05T09:42:43.617978Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:43.617964Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-05-05T09:42:43.614321Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:43.738379Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:43.738363Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-05-05T09:42:43.733672Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:43.849512Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:43.849498Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-05-05T09:42:43.845510Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:43.971332Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:43.971318Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T09:42:43.966101Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 
2025-05-05T09:42:44.085724Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:44.085711Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-05-05T09:42:44.081502Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:44.195638Z: {"database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:44.195623Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-05-05T09:42:44.191602Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] [GOOD] >> test.py::test[window-udaf_window--ForceBlocks] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] [GOOD] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-ForceBlocks] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001072/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-05-05T09:42:45.232911Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test.py::test[blocks-interval_add_date--Results] [GOOD] |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/00106d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2025-05-05T09:42:46.078880Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:46.078863Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T09:42:46.053269Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[blocks-interval_add_date--Results] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can |84.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[action-action_nested_query-default.txt-Results] |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] >> test.py::test[join-lookupjoin_not_selected--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache--Results] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001063/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: 
DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001054/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-05-05T09:42:49.512418Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:49.512404Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-05-05T09:42:49.486765Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test.py::test[window-udaf_window--ForceBlocks] [GOOD] >> test.py::test[window-udaf_window--Results] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001056/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt 2025-05-05T09:42:49.667468Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-05-05T09:42:49.667454Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T09:42:49.644242Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: 
DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Results] >> test.py::test[action-action_nested_query-default.txt-Results] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001051/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt 2025-05-05T09:42:50.356326Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[window-udaf_window--Results] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-ForceBlocks] |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-ForceBlocks] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-lookupjoin_with_cache--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_sharded-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use 
importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[window-leading/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/001042/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-05-05T09:42:52.664095Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:52.664078Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-05-05T09:42:52.642741Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:52.804247Z: 
{"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:52.804232Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-05-05T09:42:52.770074Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:52.928117Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:52.928102Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-05-05T09:42:52.910782Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:53.057104Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:53.057091Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-05-05T09:42:53.035030Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:53.178020Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:53.178003Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-05-05T09:42:53.162779Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-05-05T09:42:53.317484Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:42:53.317466Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-05-05T09:42:53.302492Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/00103e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-05-05T09:42:53.334521Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use 
importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] [GOOD] >> test.py::test[type_v3-float--ForceBlocks] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead 
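The nemesis py3test chunks above all report the same Python 3.14 DeprecationWarnings, and each warning already names its replacement: importlib.util.find_spec() instead of pkgutil.find_loader(), and ast.Constant with its value attribute instead of ast.Str and the s attribute. A minimal sketch of those substitutions, for orientation only (this is not the Flask/Werkzeug code that triggers the warnings):

# Sketch of the Python 3.14 migrations named in the DeprecationWarnings above.
# Illustrative only; not the contrib/python Flask/Werkzeug code from this log.
import ast
import importlib.util

# Old: pkgutil.find_loader("json") is not None
# New: importlib.util.find_spec() returns a ModuleSpec (or None).
def module_available(name: str) -> bool:
    return importlib.util.find_spec(name) is not None

# Old: isinstance(node, ast.Str) followed by node.s
# New: string literals are ast.Constant nodes; read node.value.
def string_literals(source: str) -> list[str]:
    return [
        node.value
        for node in ast.walk(ast.parse(source))
        if isinstance(node, ast.Constant) and isinstance(node.value, str)
    ]

if __name__ == "__main__":
    print(module_available("json"))             # True
    print(string_literals("x = 'a'; y = 'b'"))  # ['a', 'b']
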
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-Results] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/001015/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/001015/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk13/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2860463 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: 
contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[type_v3-float--ForceBlocks] [GOOD] >> test.py::test[type_v3-float--Results] |84.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[window-win_func_first_last_over_nonopt-default.txt-Results] [GOOD] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_only_semi_and_other--Results] |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] >> test.py::test[type_v3-float--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--ForceBlocks] |84.6%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test.py::test[join-mergejoin_choose_primary--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--Results] [GOOD] >> test.py::test[udf-udf--ForceBlocks] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fee/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fee/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk11/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2864225 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |84.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/00101a/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/00101a/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2859848 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] [GOOD] 
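Several ydb/tests/datashard/s3 chunks above warn about unclosed moto_server.out.log / moto_server.err.log handles around a subprocess.Popen call and about a subprocess that is still running, and they suggest enabling tracemalloc to get the allocation traceback. A hedged sketch of the usual way such warnings are avoided; the function names and log paths are hypothetical and do not reproduce the real library/recipes/common API:

# Hypothetical sketch: keep the redirected log handles, reap the child, and
# close the files, which is what silences the ResourceWarnings quoted above.
# Run with PYTHONTRACEMALLOC=1 (or call tracemalloc.start()) to obtain the
# allocation traceback the warning refers to.
import subprocess

def start_server(cmd, out_path, err_path):
    out = open(out_path, "w", encoding="utf-8")
    err = open(err_path, "w", encoding="utf-8")
    proc = subprocess.Popen(cmd, stdout=out, stderr=err)
    # Hand the open handles back to the caller so they can be closed later.
    return proc, out, err

def stop_server(proc, out, err, timeout=10):
    proc.terminate()
    try:
        proc.wait(timeout=timeout)      # reaps the child ("still running" warning)
    except subprocess.TimeoutExpired:
        proc.kill()
        proc.wait()
    out.close()                         # closes the "unclosed file" handles
    err.close()
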
>> test.py::test[window-win_func_lead_lag_worm--Results] >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/000fd8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2025-05-05T09:43:04.731471Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/00101e/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/00101e/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2859854 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--true] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/00100a/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/00100a/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk10/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2861303 is still running ResourceWarning: Enable 
tracemalloc to get the object allocation traceback >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ffa/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ffa/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2862368 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fde/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fde/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk12/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2868656 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for 
TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:41:31.449349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:41:31.449375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:31.449379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:41:31.449382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:41:31.449386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:41:31.449389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:41:31.449396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:41:31.449407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:41:31.449480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:41:31.449541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:41:31.457873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:41:31.457899Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:41:31.457968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:41:31.459438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:41:31.459469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:41:31.459490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:41:31.460047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:41:31.460080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:41:31.460165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:31.460207Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:41:31.460505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:31.460729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:31.460736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:31.460771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:41:31.460775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:31.460779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:41:31.460798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:41:31.461775Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:41:31.473980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:41:31.474077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.474149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:41:31.474224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:41:31.474238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.475229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:31.475267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:41:31.475354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.475368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:41:31.475373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:41:31.475379Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:41:31.475909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.475924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:41:31.475930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:41:31.476296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.476308Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.476314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:31.476322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:41:31.476967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:41:31.477406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:41:31.477453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:41:31.477674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:41:31.477701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:41:31.477709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:31.477767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:41:31.477775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:41:31.477817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:41:31.477830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:41:31.478299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:41:31.478310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:41:31.478364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:41:31.478371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:41:31.478382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:41:31.478390Z node 1 :FLAT_TX_SCHEMESHARD I ... Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T09:43:11.464968Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:43:11.465092Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465100Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T09:43:11.465113Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:43:11.465407Z node 262 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:43:11.465446Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465450Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:43:11.465454Z node 262 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465518Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:43:11.465538Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T09:43:11.465777Z node 262 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465795Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 1125281433700 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465801Z node 262 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465837Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:43:11.465844Z node 262 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 
progress is 1/1 2025-05-05T09:43:11.465847Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:43:11.465850Z node 262 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:43:11.465853Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:43:11.465864Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:43:11.465872Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:43:11.465875Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:43:11.465880Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:43:11.465883Z node 262 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:43:11.465886Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:43:11.465895Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:43:11.465899Z node 262 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:43:11.465902Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T09:43:11.465904Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:43:11.466472Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.466548Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-05-05T09:43:11.466556Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-05T09:43:11.466616Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T09:43:11.466624Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T09:43:11.466635Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.466983Z node 262 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:43:11.467477Z node 262 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:43:11.467490Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:43:11.467538Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:43:11.467566Z node 262 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:43:11.467571Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [262:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:43:11.467576Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [262:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:43:11.467736Z node 262 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.467749Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.467757Z node 262 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:43:11.467762Z node 262 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T09:43:11.467767Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:43:11.467957Z node 262 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.467972Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.467976Z node 262 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:43:11.467980Z node 262 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:43:11.467985Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:43:11.467999Z node 262 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:43:11.468004Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [262:130:2154] 2025-05-05T09:43:11.468062Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:43:11.468067Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:43:11.468077Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2025-05-05T09:43:11.468858Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.468980Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:43:11.468999Z node 262 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:43:11.469011Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:43:11.469021Z node 262 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:43:11.469025Z node 262 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:43:11.469031Z node 262 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T09:43:11.469079Z node 262 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:43:11.469483Z node 262 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T09:43:11.469532Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T09:43:11.469543Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T09:43:11.469626Z node 262 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T09:43:11.469642Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T09:43:11.469647Z node 262 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [262:888:2820] TestWaitNotification: OK eventTxId 1003 >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates >> test.py::test[aggr_factory-avg-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByOneField--Results] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_continue_mode.py::TestContinueMode::test_deny_disposition_from_checkpoint_in_create_query[v1-mvp_external_ydb_endpoint0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/000fcf/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt 2025-05-05T09:43:06.994084Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-05-05T09:43:06.994064Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-05-05T09:43:06.979386Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] >> 
test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_3] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[udf-udf--ForceBlocks] [GOOD] >> test.py::test[udf-udf--Results] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] >> test_quota_exhaustion.py::TestYdbWorkload::test >> test.py::test[window-win_func_lead_lag_worm--Results] [GOOD] >> test.py::test[window-win_func_special--ForceBlocks] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--true-YDB] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteradmin-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/001000/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/001000/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2861713 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[udf-udf--Results] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-other-admin] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[udf-udf--Results] [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] >> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] >> test.py::test[aggregate-GroupByOneField--Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--true-YDB] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [GOOD] >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fff/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fff/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2861666 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[window-win_func_special--ForceBlocks] [GOOD] >> test.py::test[window-win_func_special--Results] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--false] |84.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test_quota_exhaustion.py::TestYdbWorkload::test_delete >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[unblock] |84.9%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fca/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fca/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2874762 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--false-YDB] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test.py::test[window-win_func_special--Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-ForceBlocks] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Results] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [GOOD] >> 
test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |85.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_user_administration.py::test_database_admin_can_create_user |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_3] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_4] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-fifo] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--true] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--false] [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--false-YDB] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fd7/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the 
object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fd7/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2871259 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fc4/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fc4/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk8/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2876637 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] >> test_continue_mode.py::TestContinueMode::test_deny_state_load_mode_from_checkpoint_in_modify_query[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-std] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v1-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_4] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_5] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] >> 
test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [GOOD] >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fbd/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fbd/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk7/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2878654 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_storage_config.py::TestStorageConfig::test_cases[case_9] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use 
importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types7-all_types7-index7---] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fe3/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000fe3/ydb/tests/datashard/s3/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 2865758 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |85.1%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |85.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [GOOD] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteradmin-True] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteruser-False] >> test_storage_config.py::TestStorageConfig::test_cases[case_5] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_6] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--true-YDB] >> test_auth_system_views.py::test_tenant_auth_groups_access[clusteruser-False] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[dbadmin-True] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] >> test_auth_system_views.py::test_tenant_auth_groups_access[dbadmin-True] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[ordinaryuser-False] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-other-admin] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-subgroup] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_ttl.py::TestTTLAlterSettings::test_case |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_auth_system_views.py::test_tenant_auth_groups_access[ordinaryuser-False] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-subgroup] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[rename-admin-group] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead 
contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[rename-admin-group] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[block] >> test_disposition.py::TestContinueMode::test_disposition_time_ago[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--false] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[block] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[change-password] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--true] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[change-password] [GOOD] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_6] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_7] >> test_tenants.py::TestTenants::test_create_tables[enable_alter_database_create_hive_first--true] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_0] >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[unblock] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_owner >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> 
test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--true-YDB] >> test_user_administration.py::test_database_admin_cant_change_database_owner [GOOD] >> test_user_administration.py::test_user_can_change_password_for_himself[dbadmin] >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--false] >> test_user_administration.py::test_user_can_change_password_for_himself[dbadmin] [GOOD] >> test_user_administration.py::test_user_can_change_password_for_himself[ordinaryuser] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--false] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_user_administration.py::test_user_can_change_password_for_himself[ordinaryuser] [GOOD] |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_9] [GOOD] >> test_storage_config.py::TestStorageConfig::test_create_tablet >> test_storage_config.py::TestStorageConfig::test_create_tablet [GOOD] >> test_user_administration.py::test_database_admin_can_create_user [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-subgroup] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [GOOD] >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_eval.py::TestEval::test_eval_2_2[v1] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--Results] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-subgroup] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-user] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[add-user] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-admin-group] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' 
method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-admin-group] [GOOD] >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-himself] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--false] [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-himself] [GOOD] >> test_system_views.py::TestPartitionStats::test_case >> test_storage_config.py::TestStorageConfig::test_cases[case_7] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_8] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-fifo] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--true] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_db_counters.py::TestKqpCounters::test_case >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--false-YDB] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_users_groups_with_acl.py::test_yql_create_user_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--false-YDB] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--true] >> test_storage_config.py::TestStorageConfig::test_cases[case_0] [GOOD] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] >> test_storage_config.py::TestStorageConfig::test_cases[case_10] >> test_storage_config.py::TestStorageConfig::test_cases[case_8] [GOOD] >> test_system_views.py::TestPartitionStats::test_case [GOOD] |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> 
test.py::test[aggregate-group_by_cube_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--true] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] >> test_eval.py::TestEval::test_eval_2_2[v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_cpu_quota.py::TestCpuQuota::test_cpu_quota[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=2880614) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:43:32] send response localhost:31417/?database=local ::1 - - [05/May/2025 09:43:32] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:43:47] send response localhost:31417/?database=local ::1 - - [05/May/2025 09:43:47] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--false] [GOOD] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--false] [GOOD] >> test_system_views.py::TestQueryMetrics::test_case |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_8] [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--true] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Results] >> test_users_groups_with_acl.py::test_query_create_group_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--false-YDB] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--true-YDB] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--true] >> 
test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--true] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] [SKIPPED] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] >> test_storage_config.py::TestStorageConfig::test_cases[case_10] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_11] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_progress_when_tenant_tablets_run_on_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] >> test_tenants.py::TestTenants::test_create_drop_create_table2[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--false] >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql_empty_database_header[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] >> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] >> test_storage_config.py::TestStorageConfig::test_cases[case_11] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_12] >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection >> test_tenants.py::TestTenants::test_create_remove_database[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> 
test_eval.py::TestEval::test_eval_2_2[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=2814989) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:42:15] send response localhost:6210/?database=local ::1 - - [05/May/2025 09:42:15] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--true-YDB] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] >> test_create_users.py::test_create_user >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers >> test_storage_config.py::TestStorageConfig::test_cases[case_12] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_1] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [GOOD] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_user_administration.py::test_database_admin_cant_change_database_admin_user[change-password] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] [GOOD] >> test.py::test[aggregate-group_by_session--Results] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--true] >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs >> test_tenants.py::TestTenants::test_force_delete_tenant_when_table_has_been_deleted[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--true] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas >> 
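The paired "ResourceWarning: unclosed ... Enable tracemalloc to get the object allocation traceback" messages above mean some object (the log does not say which) is garbage-collected without being closed. A short sketch of how to act on that hint: with tracemalloc started before the leak occurs, the warning is printed together with the allocation traceback. The socket below is only a stand-in for whatever the suite actually leaves unclosed.

    # Sketch: equivalent to running the interpreter with `python -X tracemalloc=10`.
    import socket
    import tracemalloc
    import warnings

    tracemalloc.start(10)                             # keep up to 10 frames per allocation
    warnings.simplefilter("always", ResourceWarning)  # ResourceWarning is ignored by default

    s = socket.socket()   # stand-in for the unclosed object reported in the log
    del s                 # collected without close(): emits ResourceWarning, now followed by
                          # "Object allocated at (most recent call last): ..." via tracemalloc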
test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--true-YDB] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas [GOOD] >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas >> test_storage_config.py::TestStorageConfig::test_cases[case_1] [GOOD] >> test_storage_config.py::TestStorageConfig::test_cases[case_2] >> test_users_groups_with_acl.py::test_query_create_user_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--false-YDB] >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::OverrideQuotas >> TYdbControlPlaneStorageQuotas::OverrideQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::GetStaleUsage >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> TYdbControlPlaneStorageQuotas::GetStaleUsage [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--false-YDB] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--false] [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate >> test.py::test[aggregate-group_by_session--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_yql_create_user_by_tenant_admin[domain_login_only--true-YDB] [GOOD] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--false] [XFAIL] >> test_create_users.py::test_create_user [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource [GOOD] >> TYdbControlPlaneStorageTest::ShouldCreateTable >> TYdbControlPlaneStorageTest::ShouldCreateTable [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> 
test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--false] [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead |85.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_user_administration.py::test_database_admin_cant_change_database_admin_group[remove-himself] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--true] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess >> test_storage_config.py::TestStorageConfig::test_cases[case_2] [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] [GOOD] >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? 
S 05:35 0:03 [migration/2] ... (repeating cpuhp/N, idle_inject/N, migration/N, ksoftirqd/N and kworker/N:0H rows for CPUs 2-15) ... root 109 0.0 0.0 0 0 ?
I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... geWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T09:44:23.601698Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:44:23.601700Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:44:23.601799Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small". 
Create session OK 2025-05-05T09:44:23.601801Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:23.601802Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:23.603182Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T09:44:23.603207Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-05-05T09:44:23.603210Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:23.603210Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:23.603212Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:23.603213Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:23.603372Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T09:44:23.603382Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:23.603383Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:23.603420Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T09:44:23.603422Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:23.603423Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:23.603444Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:44:23.603452Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:23.603453Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:23.619084Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:44:23.619106Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:44:23.635179Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:44:23.635198Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:44:23.651513Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:44:23.651534Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:44:23.652308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:23.652322Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:44:23.652375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:44:23.652380Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings": 
2025-05-05T09:44:23.652539Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:23.652542Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:44:23.652578Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:44:23.652589Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:44:23.652608Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:23.652610Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:44:23.652673Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:23.652676Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:44:23.652678Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:44:23.652685Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:44:23.652736Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:23.652747Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:44:23.652788Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:44:23.652798Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:44:23.652802Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:44:23.652804Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:44:23.652853Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:23.652863Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:44:23.652876Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:23.652879Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries": >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] >> TYdbControlPlaneStorageListBindings::ShouldSuccess >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--false-YDB] [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_storage_config.py::TestStorageConfig::test_cases[case_2] [GOOD] >> test_tenants.py::TestTenants::test_resolve_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] >> TYdbControlPlaneStorageModifyBinding::ShouldValidate >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] >> TYdbControlPlaneStorageListBindings::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByName >> test_create_users_strict_acl_checks.py::test_create_user >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_complete[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] >> TYdbControlPlaneStorageModifyBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns >> test_dynamic_tenants.py::test_create_tenant_with_cpu[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByName [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty >> 
test.py::test[bigdate-table_io-default.txt-Results] [GOOD] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] |85.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--false-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--true-YDB] >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldPageToken ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_query_create_group_by_domain_admin[domain_login_only--false-YDB] [GOOD] |85.5%| [TA] $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... results_accumulator.log} |85.5%| [TA] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--false] >> test_create_users_strict_acl_checks.py::test_create_user [GOOD] >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] [GOOD] >> test_tenants.py::TestTenants::test_create_drop_create_table3[enable_alter_database_create_hive_first--true] [XFAIL] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/protobuf/py3/google/protobuf/text_format.py:568: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/protobuf/py3/google/protobuf/text_format.py:568: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] >> TYdbControlPlaneStorageListBindings::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldValidate >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> 
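The recurring DeprecationWarning for ydb/tests/library/sqs/requests_client.py:140 ("The 'warn' method is deprecated, use 'warning' instead") is resolved by renaming the call; logging.Logger.warn has been a deprecated alias of warning() since Python 3.3. A sketch of the pattern (the function and logger names are illustrative; only the warning() call and the message format come from the log):

    import logging

    logger = logging.getLogger("sqs.requests_client")  # illustrative name

    def report_failed_request(code, reason, text):
        # logger.warn(...) triggers the DeprecationWarning seen in the log;
        # warning() is the supported spelling with the same signature.
        logger.warning("Last request failed with code {}, reason '{}' and text '{}'".format(
            code, reason, text))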
test_users_groups_with_acl.py::test_query_create_user_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] >> test_users_groups_with_acl.py::test_yql_create_group_by_domain_admin[domain_login_only--true-YDB] [GOOD] >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--false-YDB] >> TYdbControlPlaneStorageListBindings::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_remove_database_wait[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldValidate >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldValidate >> TYdbControlPlaneStorageModifyQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser >> TYdbControlPlaneStorageCreateConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--true] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_register_tenant_and_force_drop_with_table[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [GOOD] >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--false] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_create_users_strict_acl_checks.py::test_create_user [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--true] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok 
[GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_custom_coordinator_options[enable_alter_database_create_hive_first--false] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob >> test_dynamic_tenants.py::test_create_tenant_no_cpu[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldSuccess |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> 
test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePingTask::ShouldValidate >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser >> TYdbControlPlaneStorageGetResult::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldEmpty >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] >> TYdbControlPlaneStoragePingTask::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] >> TYdbControlPlaneStorageGetResult::ShouldEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAst >> 
TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? 
S 05:35 0:00 [cpuhp/9] ... (repeating cpuhp/N, idle_inject/N, migration/N, ksoftirqd/N and kworker/N:0H rows for CPUs 9-21) ... root 146 0.0 0.0 0 0 ?
S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T09:44:51.643368Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:51.643370Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:51.644973Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T09:44:51.644985Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:51.644986Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:51.644999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:44:51.645004Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:51.645006Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:51.645132Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-05-05T09:44:51.645140Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:51.645141Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:51.645169Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:44:51.645172Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:51.645173Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:51.645228Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T09:44:51.645237Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:51.645238Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:51.656931Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:44:51.656949Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:44:51.678959Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:44:51.678978Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:44:51.686152Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:44:51.686169Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:44:51.686189Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:44:51.686198Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:44:51.686422Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:51.686430Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:44:51.686429Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:51.686432Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:44:51.686494Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:44:51.686502Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:44:51.686504Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:44:51.686506Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:44:51.696186Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:44:51.696205Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:44:51.696370Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:51.696395Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:44:51.696440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: 
Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:51.696447Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:44:51.696530Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:51.696533Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:44:51.696535Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:44:51.696538Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:44:51.696593Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:51.696605Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:44:51.696651Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:51.696674Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_drop_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility >> TYdbControlPlaneStoragePipeline::ShouldCheckAst [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope [GOOD] >> 
TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_users_groups_with_acl.py::test_yql_create_group_by_tenant_admin[domain_login_only--false-YDB] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed 
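The DeprecationWarning quoted above comes from ydb/tests/library/sqs/requests_client.py:140, where the deprecated logging.Logger.warn alias is still called. A minimal sketch of the usual fix, with the wrapper name and arguments assumed for illustration rather than taken from that file:

    import logging

    logger = logging.getLogger(__name__)

    def report_failed_request(code, reason, text):
        # Logger.warn is a deprecated alias; Logger.warning is the supported spelling.
        # The message mirrors the one in the warning; this wrapper itself is hypothetical.
        logger.warning(
            "Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text)
        )

Switching the call site from logger.warn(...) to logger.warning(...) is enough to silence the warning; no logging behaviour changes.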
[GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? 
I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? 
I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... difyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:57.973690Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:57.974445Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:44:57.974459Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:57.974462Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:57.974563Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:44:57.974572Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:57.974574Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:57.974656Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2025-05-05T09:44:57.974673Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:44:57.974675Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:44:57.974841Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-05-05T09:44:57.974853Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:57.974854Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:57.986612Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:44:57.986634Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:44:58.007876Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:44:58.007897Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:44:58.018391Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:58.018427Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:44:58.018672Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:58.018686Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:44:58.018741Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:58.018747Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:44:58.018850Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:44:58.018861Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:44:58.018933Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:58.018945Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:44:58.019037Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:44:58.019045Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:44:58.019103Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:58.019111Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:44:58.019159Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:44:58.019166Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:44:58.019209Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:44:58.019213Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:58.019216Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:44:58.019217Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:44:58.019260Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings" 
2025-05-05T09:44:58.019267Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:44:58.024588Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:44:58.024612Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:58.024620Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:44:58.024623Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyConnection::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:44:58.556116Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:185: Revision of the connection has been changed already. Please restart the request with a new revision 2025-05-05T09:44:58.556462Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user@staff, utcuebjuiptn272j25fi] ModifyConnectionRequest: {connection_id: "utcuebjuiptn272j25fi" content { name: "test_connection_name_2" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } previous_revision: 10 } ERROR: {
: Error: Revision of the connection has been changed already. Please restart the request with a new revision, code: 1003 } >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate >> test_dynamic_tenants.py::test_drop_tenant_without_nodes_could_continue[enable_alter_database_create_hive_first--true] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-std] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? 
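The ModifyConnection failure recorded above (code 1003, "Revision of the connection has been changed already") is the control-plane storage rejecting a request whose previous_revision is stale, i.e. ordinary optimistic locking. A minimal retry sketch, assuming hypothetical client helpers describe_connection/modify_connection and a RevisionMismatchError mapped from code 1003 -- none of these names come from the actual FQ API:

    class RevisionMismatchError(Exception):
        """Hypothetical wrapper for the code-1003 validation error."""

    def modify_with_fresh_revision(client, connection_id, new_content, max_attempts=3):
        for _ in range(max_attempts):
            current = client.describe_connection(connection_id)  # re-read the revision
            try:
                return client.modify_connection(
                    connection_id,
                    content=new_content,
                    previous_revision=current.revision,
                )
            except RevisionMismatchError:
                continue  # another writer bumped the revision; retry with the new one
        raise RuntimeError(f"connection {connection_id} kept changing, giving up")

This is what the error text asks for: restart the request with the revision observed after the concurrent change.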
S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? 
I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... n OK 2025-05-05T09:44:58.776685Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:58.776686Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:58.777266Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T09:44:58.777276Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:44:58.777277Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:44:58.777706Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-05-05T09:44:58.777724Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:44:58.777726Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:44:58.777958Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries". 
Create session OK 2025-05-05T09:44:58.777983Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:58.777984Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:58.778008Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T09:44:58.778011Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:44:58.778013Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:44:58.778177Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2025-05-05T09:44:58.778187Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:44:58.778188Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:44:58.778606Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys". 
Create session OK 2025-05-05T09:44:58.778629Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:58.778631Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:58.790109Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:44:58.790131Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:44:58.813973Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:44:58.814016Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:44:58.830992Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:44:58.831017Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:44:58.831084Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:44:58.831109Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:44:58.831334Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:44:58.831347Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:44:58.831377Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:44:58.831389Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:44:58.831427Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:44:58.831438Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:44:58.831494Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:44:58.831506Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:44:58.831763Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:44:58.831764Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:44:58.831770Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:44:58.831772Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:44:58.832022Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:44:58.832033Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:44:58.832097Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:44:58.832123Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:44:58.832168Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:44:58.832180Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:44:58.832224Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:44:58.832235Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:44:58.832289Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:44:58.832300Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldCheckAutomaticTtl::Execute_(NUnitTest::TTestContext&)/tenants": >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] [GOOD] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic >> test.py::test[column_group-hint_non_lst_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-many_inserts--Results] [SKIPPED] >> test.py::test[column_order-insert_tmp-default.txt-Results] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_tenant_then_exec_yql[enable_alter_database_create_hive_first--false] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldValidate >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission >> TYdbControlPlaneStorageGetTask::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--Results] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--true] >> test.py::test[column_order-insert_tmp-default.txt-Results] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert_with_reorder_cols--Results] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=2928582) is multi-threaded, use of fork() may lead to deadlocks in the child. 
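The DeprecationWarning above is emitted by CPython when multiprocessing creates children via fork() inside an already multi-threaded process. The snippet below is a minimal illustrative sketch, not code from the YDB test harness: it shows the usual mitigation of selecting the "forkserver" (or "spawn") start method so child processes are not produced by forking a multi-threaded parent.

```python
# Illustrative sketch only (assumed, not taken from ydb/tests): avoid fork()
# in a multi-threaded parent by choosing a different start method.
import multiprocessing as mp


def work(item):
    # stand-in for the per-message work the tests perform
    return item * 2


if __name__ == "__main__":
    # "forkserver" (or "spawn") starts children without fork()-ing the
    # multi-threaded parent, which is what the DeprecationWarning is about.
    ctx = mp.get_context("forkserver")
    with ctx.Pool(processes=4) as pool:
        results = pool.map(work, range(8))
    # leaving the with-block terminates the pool, which also avoids the
    # "unclosed running multiprocessing pool" ResourceWarning seen further down
    print(results)
```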
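The ResourceWarnings reported below come from a socket that is opened for each send_message(...) call in test_garbage_collection.py:37 and never explicitly closed. The sketch that follows borrows only the parameter list visible in the warning; the `_connect` helper and the request framing are invented for illustration and are not the actual ydb/tests/library implementation. It demonstrates the general pattern of closing the per-call connection deterministically so the warning cannot be raised.

```python
# Hypothetical sketch: close the per-call socket deterministically so it never
# surfaces as "ResourceWarning: unclosed <socket>". Helper and payload layout
# are assumptions for illustration only.
import contextlib
import json
import socket


def _connect(server: str, port: int) -> socket.socket:
    # assumed helper: open a TCP connection to the queue endpoint
    return socket.create_connection((server, port), timeout=10)


def send_message(server, username, queue_url, sqs_port, body, seq_no, group_id):
    # only the parameter list comes from the warning text; the payload is made up
    payload = json.dumps({
        "user": username,
        "queue": queue_url,
        "body": body,
        "seq_no": seq_no,
        "group_id": group_id,
    }).encode()
    # contextlib.closing() guarantees sock.close() runs even if send/recv
    # raises, so the socket is never left open
    with contextlib.closing(_connect(server, sqs_port)) as sock:
        sock.sendall(payload)
        return sock.recv(65536)
```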
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback [the same pair of ResourceWarning lines from test_garbage_collection.py:37 repeats once per sent message; the duplicated occurrences are omitted] contrib/tools/python3/Lib/multiprocessing/pool.py:268: ResourceWarning: unclosed running multiprocessing pool ResourceWarning: Enable tracemalloc to get the object allocation traceback
>> TYdbControlPlaneStorageModifyBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery >> test.py::test[column_order-insert_with_reorder_cols--Results] [GOOD] >> test.py::test[column_order-join_nosimple--Results] [SKIPPED] >> test.py::test[column_order-union_all-default.txt-Results] >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ?
I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... 025-05-05T09:45:08.222192Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T09:45:08.222208Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:08.222210Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:08.222651Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T09:45:08.222661Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:08.222662Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:08.223003Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T09:45:08.223015Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:08.223017Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:08.223214Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:45:08.223224Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:08.223226Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:08.223878Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T09:45:08.223887Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:08.223890Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:08.235869Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:08.235894Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:08.251499Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:08.251519Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:08.276064Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:08.276083Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:08.276237Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:08.276264Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:08.276391Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:08.276403Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:08.277184Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:08.277199Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:08.277213Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:08.277218Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:08.277359Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:08.277364Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:08.277366Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:08.277370Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:08.277437Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:08.277446Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:08.277710Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:08.277738Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:08.278209Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:08.278222Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:45:08.278649Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:08.278660Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:08.278721Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:08.278727Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:08.278794Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:08.278797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteQuery::TTestCaseShouldCheckPreviousRevisionFailed::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:08.610602Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:185: Revision of the query has been changed already. Please restart the request with a new revision 2025-05-05T09:45:08.610842Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DeleteQueryRequest - DeleteQueryResult: {query_id: "utquebjuig4ou75nnbgf" previous_revision: 100 } ERROR: {
: Error: Revision of the query has been changed already. Please restart the request with a new revision, code: 1003 } >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] >> test.py::test[column_order-union_all-default.txt-Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> test_tenants.py::TestTenants::test_yql_operations_over_dynamic_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] [GOOD] >> test.py::test[csee-yql-7237--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? 
S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... te table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:10.970530Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:10.970688Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings". 
Create session OK 2025-05-05T09:45:10.970698Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:10.970700Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:10.971006Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2025-05-05T09:45:10.971021Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:10.971023Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:10.971275Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:45:10.971283Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:10.971285Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:10.971740Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:45:10.971750Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:10.971752Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:10.972032Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T09:45:10.972040Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:10.972041Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:10.984468Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:10.984492Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:11.000229Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:11.000249Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:11.019918Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:11.019940Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:11.020008Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:11.020021Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:11.020192Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:11.020201Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:11.020216Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:11.020228Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:11.020287Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:11.020294Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:11.020315Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:11.020321Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:45:11.020351Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:11.020353Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:11.020372Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:11.020381Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:11.020493Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:11.020501Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:11.020503Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:11.020507Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:11.020585Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:11.020595Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:11.020600Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:11.020603Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:11.020659Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:11.020668Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListConnections::TTestCaseShouldCheckFilterByConnectionType::Execute_(NUnitTest::TTestContext&)/pending_small": >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--false] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--Results] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_publish_into_schemeboard_with_common_ssring.py::TestOn3DC::test_create_dirs [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 
'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(
>> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL [GOOD]
>> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask
>> test.py::test[table_range-each_with_non_existing--Results] [GOOD]
>> test.py::test[table_range-range_with_view--Results]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame
>> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD]
>> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask [GOOD]
>> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask
>> test.py::test[csee-yql-7237--Results] [GOOD]
>> test.py::test[distinct-distinct_window-default.txt-Results]
>> TYdbControlPlaneStorageListBindings::ShouldCheckLimit [GOOD]
>> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD]
>> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD]
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD]
>> TYdbControlPlaneStorageCreateQuery::ShouldSucccess
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD]
>> TYdbControlPlaneStorageCreateQuery::ShouldSucccess [GOOD]
>> TYdbControlPlaneStorageCreateQuery::ShouldValidate
>> test.py::test[table_range-range_with_view--Results] [GOOD]
>> test.py::test[table_range-table_funcs_expr--Results]
>> TYdbControlPlaneStorageCreateQuery::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess
>> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD]
Test command err:
Netstat: sh: 1: netstat: not found
Process stat:
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ?
I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? 
S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? 
S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:20.366841Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T09:45:20.366853Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:20.366854Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:20.366902Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:45:20.366928Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:20.366929Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:20.367500Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:45:20.367511Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:20.367513Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:20.368130Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T09:45:20.368138Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:20.368141Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:20.368842Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-05-05T09:45:20.368865Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:20.368867Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:20.376407Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:20.376447Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:20.394612Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:20.394637Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:20.409443Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:20.409465Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:20.409548Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:20.409571Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:20.409858Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:20.409861Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:20.409864Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:20.409874Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/mappings": 
2025-05-05T09:45:20.409958Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:20.409969Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:20.410041Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:20.410051Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:20.410073Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:20.410091Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:20.410115Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:20.410118Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:20.410159Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:20.410161Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:20.412761Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:20.412790Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:20.412808Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:20.412815Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:20.413027Z 
node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas"
2025-05-05T09:45:20.413027Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections"
2025-05-05T09:45:20.413030Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/connections":
2025-05-05T09:45:20.413037Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStoragePipeline::TTestCaseShouldReturnPartialBatchForGetTask::Execute_(NUnitTest::TTestContext&)/quotas":
2025-05-05T09:45:21.357699Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: PingTaskRequest (resign): UNAVAILABLE 1 2025-05-05T09:45:21.357656Z 0.000000s
2025-05-05T09:45:21.608694Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: PingTaskRequest (resign): UNAVAILABLE 1 2025-05-05T09:45:21.608674Z 0.000000s
2025-05-05T09:45:21.768052Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: Validation: (NYql::TCodeLineException) :0: Error parsing proto message for query. Please contact internal support
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed [GOOD]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD]
>> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] [GOOD]
>> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo]
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD]
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed
>> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC]
>> test.py::test[distinct-distinct_window-default.txt-Results] [GOOD]
>> test.py::test[epochs-write_and_use_in_same_epoch--Results]
>> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission
>> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility [GOOD]
>> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility
>> test.py::test[table_range-table_funcs_expr--Results] [GOOD]
>> test.py::test[tpch-q1-default.txt-Results]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission [GOOD]
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist
>> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--false] [GOOD]
>> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection
>> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldValidate
>> TYdbControlPlaneStorageListQueries::ShouldSuccess
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1]
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---]
>> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true]
|85.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo]
|85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldValidate
>> test.py::test[epochs-write_and_use_in_same_epoch--Results] [GOOD]
>> test.py::test[expr-non_persistable_group_by_having_some_fail--Results]
>> TYdbControlPlaneStorageListQueries::ShouldSuccess [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldPageToken
>> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility [GOOD]
>> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser
>> TYdbControlPlaneStorageDeleteBinding::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser
>> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0]
>> TYdbControlPlaneStorageDeleteQuery::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection
>> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD]
>> test.py::test[expr-tagged_runtime-default.txt-Results]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess
>> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [GOOD]
>> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---]
>> test.py::test[tpch-q1-default.txt-Results] [GOOD]
>> test.py::test[tpch-q13-default.txt-Results]
>> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldPageToken [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken
>> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection
>> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess [GOOD]
>> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed
>> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery [GOOD]
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty
>> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [GOOD]
>> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD]
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std]
>> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding
>> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser [GOOD]
>> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId
>> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD]
>> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] [GOOD]
>> test.py::test[expr-tagged_runtime-default.txt-Results] [GOOD]
>> test.py::test[file-file_constness--Results]
>> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue]
|85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD]
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty [GOOD]
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess
>> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldValidate
>> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess
>> TYdbControlPlaneStorageListQueries::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterName
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic [GOOD]
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate
|85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding [GOOD]
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes
>> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess [GOOD]
>> TYdbControlPlaneStorageDescribeQuery::ShouldValidate
>> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId [GOOD]
>> TYdbControlPlaneStorageListBindings::ShouldCombineFilters
>> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD]
>> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess [GOOD]
>> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty
>> TYdbControlPlaneStorageDescribeQuery::ShouldValidate [GOOD]
>> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser
>> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD]
>> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] [GOOD]
>> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user]
>> TYdbControlPlaneStorageListQueries::ShouldFilterName [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterByMe
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD]
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic
>> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser [GOOD]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty
>> TYdbControlPlaneStorageListQueries::ShouldFilterByMe [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterType
>> test.py::test[tpch-q13-default.txt-Results] [GOOD]
>> test.py::test[tpch-q20-default.txt-Results]
>> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD]
>> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess
>> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty [GOOD]
>> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic
>> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---]
>> test.py::test[file-file_constness--Results] [GOOD]
>> test.py::test[flatten_by-flatten_and_where--Results]
>> TYdbControlPlaneStorageListQueries::ShouldFilterType [GOOD]
>> TYdbControlPlaneStorageListQueries::ShouldFilterMode
-------
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... 45:36.804337Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:36.804390Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:45:36.804398Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:45:36.804400Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:36.804402Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:36.804410Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:36.804411Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:36.804697Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T09:45:36.804710Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:36.804712Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:36.804760Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-05-05T09:45:36.804769Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:36.804771Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:36.817972Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:36.818011Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:36.833527Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:36.833547Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:36.847455Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:36.847480Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:36.847509Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:36.847519Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:36.847774Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:36.847776Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:36.847779Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:36.847784Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:36.847854Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:36.847857Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:36.847901Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:36.847904Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:36.847922Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:36.847924Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:36.848098Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:36.848108Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:36.848195Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:36.848208Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:36.848493Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:36.848513Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:36.848516Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:36.848521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:36.848675Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:36.848682Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:36.848698Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:36.848710Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckObjectStorageProjectionByTypes::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:45:37.319582Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: [yandexcloud://test_folder_id_1, test_user@staff, utbuebjuhkgbl7h3uog8] CreateBindingRequest, validation failed: **** (D7BA8005) content { name: "test_binding_name_1" connection_id: "utcuebjuhkrt01u2v013" setting { object_storage { subset { path_pattern: "/root/" schema { column { name: "a" type { type_id: BOOL } } } partitioned_by: "a" } } } acl { visibility: PRIVATE } } error:
: Error: Column "a" from projection does not support Bool type, code: 400010 >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterMode [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? 
S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? 
I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:37.048831Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:37.049015Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs". 
Create session OK 2025-05-05T09:45:37.049023Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:37.049037Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:37.049411Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T09:45:37.049449Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:37.049451Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:37.049589Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:45:37.049600Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:37.049603Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:37.050198Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T09:45:37.050216Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:37.050218Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:37.050451Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small". 
Create session OK 2025-05-05T09:45:37.050466Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:37.050467Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:37.051195Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:45:37.051208Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:37.051209Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:37.055245Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:37.055262Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:37.078159Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:37.078180Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:37.083999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:37.084022Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:37.084030Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:37.084037Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:37.097952Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:37.097984Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:37.098243Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:37.098256Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:37.098300Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:37.098310Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:37.098396Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:37.098406Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:37.098407Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:37.098409Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:37.098466Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:37.098475Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:37.098476Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:37.098478Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:37.098548Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:37.098557Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants": 
2025-05-05T09:45:37.098571Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:37.098580Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:37.098619Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:37.098627Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:37.098675Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:37.098683Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListBindings::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings": >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldValidate >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery [GOOD] >> test.py::test[flatten_by-flatten_and_where--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Results] 
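The sqs py3test report earlier in this log flags a DeprecationWarning at ydb/tests/library/sqs/requests_client.py:140, where the request failure is logged through logger.warn. A minimal sketch of the change the warning itself suggests, assuming a standard library logging.Logger; the wrapper function and argument names are assumptions, and only the message text is taken from the log.

    import logging

    logger = logging.getLogger(__name__)

    def log_failed_request(code, reason, text):
        # logger.warn() is a deprecated alias of logger.warning(); the warning in the
        # test output asks for the latter. Passing the values as arguments (instead of
        # pre-formatting with str.format) also defers string formatting until the
        # record is actually emitted.
        logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                       code, reason, text)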
>> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_system_views.py::TestQueryMetrics::test_case [GOOD] >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q21-default.txt-Results] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageDescribeBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery [GOOD] |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... TestContext&)/connections". Create session OK 2025-05-05T09:45:42.584051Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:42.584052Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:42.584212Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T09:45:42.584221Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:42.584222Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:42.584612Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T09:45:42.584622Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:42.584624Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:42.584911Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T09:45:42.584921Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:42.584922Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:42.585228Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T09:45:42.585238Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:42.585239Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:42.590133Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:42.590156Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:42.616211Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:42.616235Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:42.629403Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:42.629419Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:42.629511Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:42.629533Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 
2025-05-05T09:45:42.630139Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:42.630140Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:42.630143Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:42.630153Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:42.630222Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:42.630224Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:42.630274Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:42.630277Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:42.630279Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:42.630286Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:42.630386Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:42.630388Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:42.630396Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 
2025-05-05T09:45:42.630398Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:45:42.630450Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:42.630458Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:42.630474Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:42.630483Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:42.630508Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:42.630510Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:42.630538Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:42.630546Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDeleteBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse [GOOD] >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess >> 
TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldValidate >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? 
S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? 
S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... eate table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:45.779235Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:45:45.779246Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:45.779248Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:45.779350Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings". 
Create session OK 2025-05-05T09:45:45.779360Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:45.779363Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:45.780451Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T09:45:45.780462Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:45.780464Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:45.780480Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2025-05-05T09:45:45.780483Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:45.780484Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:45.781245Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-05-05T09:45:45.781256Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:45.781257Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:45.781440Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T09:45:45.781448Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:45.781449Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:45.789434Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:45.789451Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:45.803328Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:45.803345Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:45.818690Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:45.818711Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:45.818785Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:45.818794Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:45.818990Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:45.819008Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:45.819049Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:45.819051Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:45.819119Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: 
Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:45.819128Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:45.819181Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:45.819188Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:45.821420Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:45.821433Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:45.821463Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:45.821469Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:45.821615Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:45.821632Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:45.821655Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:45.821658Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:45.821695Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:45.821703Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:45.821730Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:45.821743Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:45.821813Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:45.821823Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings": >> test.py::test[flatten_by-flatten_dict_by_opt--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Results] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? 
S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? 
S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... 
te session OK 2025-05-05T09:45:47.778232Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:47.778233Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:47.778720Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas". Create session OK 2025-05-05T09:45:47.778729Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:47.778731Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:47.778870Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T09:45:47.778890Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:47.778891Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:47.779088Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T09:45:47.779104Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:47.779106Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:47.779302Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-05-05T09:45:47.779317Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:47.779319Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:47.793483Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:47.793506Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:47.809431Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:47.809453Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:47.828430Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:47.828453Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:47.829558Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:47.829592Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:47.829593Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:47.829601Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:47.829754Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:47.829765Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:47.829841Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:47.829855Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:47.829858Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:47.829859Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:47.829915Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:47.829916Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:47.829962Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:47.829964Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:47.829966Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:47.829971Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:45:47.830010Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:47.830021Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:47.854151Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:47.854188Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:47.854228Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:47.854258Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:47.854423Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:47.854432Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-bare_yson--Results] [SKIPPED] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> test_system_views.py::TestQueryMetricsUniqueQueries::test_case >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::test_operation_with_locks[enable_alter_database_create_hive_first--true] [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic >> test.py::test[flatten_by-flatten_one_field_another--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] >> 
TYdbControlPlaneStorageCreateBinding::ShouldSucceed [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? 
S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? 
S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:49.500366Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:49.501458Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T09:45:49.501464Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2025-05-05T09:45:49.501470Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:49.501471Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:49.501472Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:49.501474Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:49.501668Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T09:45:49.501679Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:49.501680Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:49.501689Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:45:49.501692Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:49.501693Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:49.513582Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:45:49.513622Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:45:49.529214Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:45:49.529235Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:45:49.548790Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:45:49.548796Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:45:49.548803Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:45:49.548808Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:45:49.549031Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:45:49.549039Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:45:49.549041Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:45:49.549042Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:45:49.549111Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:45:49.549121Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:45:49.549137Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:45:49.549145Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:45:49.549180Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:45:49.549189Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:45:49.549204Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:45:49.549206Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:45:49.549248Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: 
Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:45:49.549255Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:45:49.549256Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:45:49.549259Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:45:49.549318Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:45:49.549326Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:45:49.550339Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:45:49.550339Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:45:49.550345Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:45:49.550350Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeBindingPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldValidate >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TYdbControlPlaneStorageModifyConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=2971490) is multi-threaded, use of fork() may lead to deadlocks in the child. 
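The stderr dumps in this log repeatedly surface two Python deprecation warnings: the `logger.warn(...)` call at ydb/tests/library/sqs/requests_client.py:140 (the `warn` alias is deprecated in favour of `warning`), and the multiprocessing notice just above, which says that calling fork() from an already multi-threaded parent may deadlock the child. Below is a minimal sketch of both fixes; the logger name, the illustrative arguments, and the choice of start method are assumptions for illustration, not taken from this log or from the test harness itself.

```python
import logging
import multiprocessing

logger = logging.getLogger("sqs.requests_client")  # stand-in for the module's own logger

# 1. The 'warn' alias is deprecated; 'warning' is the supported spelling.
#    Original (deprecated): logger.warn("Last request failed with code {} ...".format(...))
logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
               500, "InternalError", "retrying")  # illustrative values only

# 2. Avoid fork() in a multi-threaded parent: pick an explicit start method once,
#    early in the test entry point, before any Pool or Process is created.
if __name__ == "__main__":
    multiprocessing.set_start_method("forkserver")  # or "spawn"; an assumption, not from the log
```

Switching the start method changes how child processes inherit state from fixtures, so it is a trade-off to evaluate against the suite's setup rather than a drop-in change.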
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ... (remaining repeats of this ResourceWarning TRUNCATED) ... contrib/tools/python3/Lib/multiprocessing/pool.py:268: ResourceWarning: unclosed running multiprocessing pool ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser [GOOD] >>
TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName >> test.py::test[flatten_by-flatten_with_group_by--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess |86.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Utf8-pk_types30-all_types30-index30---] [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError >> 
test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey |86.2%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |86.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int32-pk_types20-all_types20-index20---] [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal3510-pk_types27-all_types27-index27---] [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-std] >> 
test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] Test command err: 2025-05-05T09:45:40.239580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500895558230524835:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:45:40.239653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000f08/r3tmp/tmpGEKMz4/pdisk_1.dat 2025-05-05T09:45:40.311600Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27120, node 1 2025-05-05T09:45:40.324736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:45:40.324753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:45:40.324756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:45:40.324810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3448 WaitRootIsUp 'Root'... 2025-05-05T09:45:40.343033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:45:40.343064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls request: Root 2025-05-05T09:45:40.344119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T09:45:40.356057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-05-05T09:45:40.366708Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvNodesHealthCheckRequest 2025-05-05T09:45:40.699712Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T09:45:40.700836Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvCreateQueryRequest 2025-05-05T09:45:40.701266Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvDescribeQueryRequest Wait query execution 0.000353s: STARTING 2025-05-05T09:45:41.699748Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T09:45:41.700015Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Start run actor. Compute state: STARTING 2025-05-05T09:45:41.700024Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m FillConnections 2025-05-05T09:45:41.700048Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Run actors params: { QueryId: utquebjuhh959kmkkn9m CloudId: mock_cloud UserId: root@builtin Owner: 1f76671c-24b33591-a1123c8a-9ea57a6b2 PreviousQueryRevision: 1 Connections: 0 Bindings: 0 AccountIdSignatures: 0 QueryType: STREAMING ExecuteMode: RUN ResultId: utruebjuhgalkp0skngh StateLoadMode: EMPTY StreamingDisposition: { } Status: STARTING DqGraphs: 0 DqGraphIndex: 0 Resource.TopicConsumers: 0 } 2025-05-05T09:45:41.700055Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T09:45:41.700094Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Compiling query ... 2025-05-05T09:45:41.700435Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:41.700532Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebjuhh959kmkkn9m Forward ping response. Success: 1. Cookie: 2 2025-05-05T09:45:41.700628Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:41.700672Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebjuhh959kmkkn9m Forward ping response. Success: 1. Cookie: 0 2025-05-05T09:45:41.701766Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvDescribeQueryRequest Wait query execution 1.000868s: RUNNING 2025-05-05T09:45:41.735275Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Graph (execution) with tasks: 1 2025-05-05T09:45:41.735548Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Overall dq tasks: 1 2025-05-05T09:45:41.735608Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Graph 0 2025-05-05T09:45:41.735806Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:41.735911Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebjuhh959kmkkn9m Forward ping response. Success: 1. Cookie: 0 2025-05-05T09:45:41.735989Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:41.736036Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebjuhh959kmkkn9m Forward ping response. Success: 1. Cookie: 1 2025-05-05T09:45:41.736046Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Overall dq tasks: 1 2025-05-05T09:45:41.736175Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:41.736250Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Executer: [1:7500895562525492813:2356], Controller: [1:7500895562525492815:2358], ResultIdActor: [1:7500895562525492814:2357] 2025-05-05T09:45:41.736269Z node 1 :FQ_RUN_ACTOR TRACE: QueryId: utquebjuhh959kmkkn9m Forward ping response. Success: 1. 
Cookie: 0 2025-05-05T09:45:41.736460Z node 1 :YQL_PROXY WARN: SessionId: 1f76671c-24b33591-a1123c8a-9ea57a6b 2025-05-05 09:45:41.736 WARN ydb-tests-fq-control_plane_storage(pid=2988094, tid=0x00007F63E65E4640) [DQ] resource_allocator.cpp:259: {utquebjuhh959kmkkn9m/[1:7500895562525492816:2359]} Send TEvAllocateWorkersRequest to Id:a9df22dc-41f6c25a-a770b94-4eb9e704,ghrun-2g7s2x4jf4,NodeId:1, 2025-05-05T09:45:41.739879Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvWriteResultDataRequest 2025-05-05T09:45:41.743739Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Query response SUCCESS. Result set index: 0. Issues count: 0. Rows count: 1 2025-05-05T09:45:41.744321Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Is about to finish query with status COMPLETED 2025-05-05T09:45:41.744334Z node 1 :FQ_RUN_ACTOR DEBUG: QueryId: utquebjuhh959kmkkn9m Write finalizing status: COMPLETING 2025-05-05T09:45:41.744591Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:41.744836Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvPingTaskRequest 2025-05-05T09:45:42.700136Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T09:45:42.702369Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvDescribeQueryRequest 2025-05-05T09:45:42.702804Z node 1 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetResultDataRequest 2025-05-05T09:45:42.956906Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500895563547016123:2062];send_to=[0:7307199536658146131:7762515]; 2025-05-05T09:45:42.956936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000f08/r3tmp/tmpEZp3Ol/pdisk_1.dat 2025-05-05T09:45:42.972496Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21084, node 2 2025-05-05T09:45:42.982048Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T09:45:42.982064Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T09:45:42.982066Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T09:45:42.982119Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
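The TEvDescribeQueryRequest entries above ("Wait query execution 0.000353s: STARTING", then RUNNING, then COMPLETED) show the harness polling a query until it reaches a terminal state. A minimal sketch of that poll-until-terminal pattern follows; describe_query is a hypothetical stub for the real control-plane call, and only the status names that actually appear in this log are used.

    # Sketch of the poll-until-terminal pattern visible in the log above.
    # describe_query() is a hypothetical stub; the real harness issues
    # DescribeQuery requests to the control plane storage service.
    import itertools
    import time

    TERMINAL = {"COMPLETED"}  # only COMPLETED appears in this log


    def describe_query(query_id: str,
                       _states=itertools.cycle(["STARTING", "RUNNING", "COMPLETED"])) -> str:
        return next(_states)  # stand-in: pretend the service advances the state


    def wait_query(query_id: str, timeout_s: float = 30.0, poll_s: float = 1.0) -> str:
        started = time.monotonic()
        while True:
            status = describe_query(query_id)
            elapsed = time.monotonic() - started
            print(f"Wait query execution {elapsed:.6f}s: {status}")
            if status in TERMINAL:
                return status
            if elapsed > timeout_s:
                raise TimeoutError(f"query {query_id} stuck in {status}")
            time.sleep(poll_s)


    if __name__ == "__main__":
        assert wait_query("utquebjuhh959kmkkn9m", poll_s=0.01) == "COMPLETED"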
2025-05-05T09:45:43.061665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T09:45:43.061700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T09:45:43.062133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T09:45:43.062617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T09:45:43.065902Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvNodesHealthCheckRequest 2025-05-05T09:45:43.390166Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvGetTaskRequest 2025-05-05T09:45:43.390334Z node 2 :FQ_RUN_ACTOR TRACE: [ydb] [ComputeDatabaseControlPlane]: Scope: yandexcloud://fqrun Single control plane mode has been chosen 2025-05-05T09:45:43.390647Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start synchronization for the scope yandexcloud://fqrun 2025-05-05T09:45:43.391119Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start fetch connections stage for the scope (single) yandexcloud://fqrun 2025-05-05T09:45:43.391131Z node 2 :FQ_RUN_ACTOR TRACE: [ydb] [SynchronizationService]: Send list connections: scope = yandexcloud://fqrun, page token = 2025-05-05T09:45:43.391221Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvListConnectionsRequest 2025-05-05T09:45:43.391264Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start fetch bindings stage for the scope yandexcloud://fqrun 2025-05-05T09:45:43.391273Z node 2 :FQ_RUN_ACTOR TRACE: [ydb] [SynchronizationService]: Send list bindings: scope = yandexcloud://fqrun, page token = 2025-05-05T09:45:43.391392Z node 2 :YQ_CONTROL_PLANE_STORAGE INFO: TEvListBindingsRequest 2025-05-05T09:45:43.391470Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start describe bindings stage for the scope yandexcloud://fqrun 2025-05-05T09:45:43.391481Z node 2 :FQ_RUN_ACTOR INFO: [ydb] [SynchronizationService]: Start create external data sources stage for the scope (bindigns list is empty) yandexcloud://fqrun 2025-05-05T09:45:43.391491Z node 2 :FQ ... lPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:46:00.970667Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T09:46:00.970681Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:00.970683Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:00.970685Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries". 
Create session OK 2025-05-05T09:46:00.970689Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:00.970692Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:00.970857Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:46:00.970860Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:00.970861Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:00.970908Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:46:00.970938Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:00.970940Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:00.971534Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:46:00.971549Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:00.971552Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:00.971623Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T09:46:00.971636Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:00.971638Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:00.984582Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:46:00.984605Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:46:01.007167Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:46:01.007191Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:46:01.020296Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:01.020321Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:46:01.020349Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:46:01.020358Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:46:01.020584Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:46:01.020595Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:46:01.020630Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:01.020640Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:46:01.020668Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:46:01.020676Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:46:01.021431Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:46:01.021438Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:46:01.021444Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:46:01.021446Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:46:01.021545Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:46:01.021569Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:46:01.021574Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:01.021576Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:46:01.021634Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:46:01.021655Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:46:01.021657Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:01.021660Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:46:01.021725Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:01.021733Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:46:01.021811Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:01.021821Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateBinding::TTestCaseShouldCheckIdempotencyKey::Execute_(NUnitTest::TTestContext&)/connections": >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-fifo] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp64-pk_types38-all_types38-index38---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_String-pk_types29-all_types29-index29---] [GOOD] ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert: got overload issue delete #0 ok delete #1 ok delete #2 ok delete #3 ok delete #4 ok delete #5 ok delete #6 ok delete #7 ok delete #8 ok delete #9 ok delete #10 ok delete #11 ok delete #12 ok delete #13 ok delete #14 ok delete #15 ok delete #16 ok delete #17 ok delete #18 ok delete #19 ok delete #20 ok delete #21 ok delete #22 ok delete #23 ok delete #24 ok delete #25 ok delete #26 ok delete #27 ok delete #28 ok delete #29 ok delete #30 ok delete #31 ok delete #32 ok delete #33 ok delete #34 ok delete #35 ok delete #36 ok delete #37 ok delete #38 ok delete #39 ok delete #40 ok delete #41 ok delete #42 ok delete #43 ok delete #44 ok delete #45 ok delete #46 ok delete #47 ok delete #48 ok delete #49 ok delete #50 ok delete #51 ok delete #52 ok delete #53 ok delete #54 ok delete #55 ok delete #56 ok delete #57 ok delete #58 ok delete #59 ok delete #60 ok delete #61 ok delete #62 ok delete #63 ok delete #64 ok delete #65 ok delete #66 ok delete #67 ok delete #68 ok delete #69 ok delete #70 ok delete #71 ok delete #72 ok delete #73 ok delete #74 ok delete #75 ok delete #76 ok delete #77 ok delete #78 ok delete #79 ok delete #80 ok delete #81 ok delete #82 ok delete #83 ok delete #84 ok delete #85 ok delete #86 ok delete #87 ok delete #88 ok delete #89 ok delete #90 ok delete #91 ok delete #92 ok delete #93 ok delete #94 ok delete #95 ok delete #96 ok delete #97 ok delete #98 ok delete #99 ok delete #100 ok delete #101 ok delete #102 ok delete #103 ok delete #104 ok delete #105 ok delete #106 ok delete #107 ok delete #108 ok delete #109 ok delete #110 ok delete #111 ok delete #112 ok delete #113 ok delete #114 ok delete #115 ok delete #116 ok delete #117 ok delete #118 ok delete #119 ok delete #120 ok delete #121 ok delete #122 ok delete #123 ok delete #124 ok delete #125 ok delete #126 ok delete #127 ok delete #128 ok delete #129 ok 
delete #130 ok … delete #499 ok [370 repetitive per-delete progress entries condensed; every delete from #130 through #499 reported ok] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess [GOOD]
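The SQS test suites in the output below repeatedly emit a DeprecationWarning from ydb/tests/library/sqs/requests_client.py:140: logger.warn() is deprecated in the standard logging module in favour of logger.warning(). A minimal sketch of the change the warning asks for, assuming the logger there is a plain logging.Logger (the logger name and wrapper function are illustrative, only the call text comes from the log):

    import logging

    logger = logging.getLogger("sqs.requests_client")  # illustrative name, not from the log

    # Deprecated spelling reported in the test output below:
    #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(...))
    # Recommended replacement: logger.warning(), here with lazy %-formatting so the
    # message is only built when the WARNING level is actually enabled.
    def log_failed_request(code, reason, text):
        logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                       code, reason, text)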
>> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? 
S 05:35 0:03 [migration/6] [… remainder of the Process stat listing: per-CPU kernel threads (cpuhp, idle_inject, migration, ksoftirqd, kworker) for CPUs 6 through 25; the listing is truncated in the original log …] root 169 0.0 ... } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.313116Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.313371Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugj6jkni29luf] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.382153Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.382344Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugj4dhnj1pp4j] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.449424Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.449685Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugj2a5hvt1rej] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.535434Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.535736Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugj08d2sdgra4] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.601735Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.601977Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugitkafn95ivq] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.670594Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.670847Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugirjlse4vng0] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.688535Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: BAD_SESSION, Issues: [ {
: Error: Exceeded maximum allowed number of active transactions, code: 2014 } {
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:861: Too many transactions, current active: 10 MaxTxPerSession: 10 } ], Query:
--!syntax_v1
-- Query name: Unknown query name
PRAGMA TablePathPrefix("local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateConnection::TTestCaseShouldCheckCommitTransactionReadWrite::Execute_(NUnitTest::TTestContext&)");
DECLARE $idempotency_key as String;
DECLARE $scope as String;
SELECT `response` FROM `idempotency_keys` WHERE `scope` = $scope AND `idempotency_key` = $idempotency_key;
2025-05-05T09:46:12.765114Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.765470Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugipgdbj176tv] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.833526Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.833776Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugimjv0rutsnl] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.907155Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.907435Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugikh9bahfpom] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:12.973975Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:12.974276Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugii9bmvetai4] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:13.042542Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:13.042797Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugig8188jo00r] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:13.104530Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:13.104786Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugie549h99kf5] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:13.169179Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:13.169412Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugic8kia0i1m1] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:13.244260Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:13.244582Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugia9h95oucer] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:13.306990Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:13.307295Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugi803m8o390q] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } 2025-05-05T09:46:13.374165Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Validation: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/validators.cpp:46: Connection with the same name already exists. Please choose another name 2025-05-05T09:46:13.374482Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: [yandexcloud://test_folder_id_1, test_user2@staff, utcuebjugi62r64f37b3] CreateConnectionRequest: {content { name: "test_connection_name_1" setting { data_streams { database_id: "my_database_id" auth { current_iam { } } } } acl { visibility: SCOPE } } idempotency_key: "aba" } ERROR: {
: Error: Connection with the same name already exists. Please choose another name, code: 1003 } >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? 
S 05:35 0:03 [migration/2] [… remainder of the Process stat listing: per-CPU kernel threads for CPUs 2 through 25, essentially identical to the listing shown earlier; truncated in the original log …] root 169 0.0 ... local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases". 
Create session OK 2025-05-05T09:46:16.475232Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:16.475233Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:16.475296Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-05-05T09:46:16.475307Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:16.475308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:16.475523Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:46:16.475532Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:16.475533Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:16.475971Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-05-05T09:46:16.475980Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:16.475981Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:16.476553Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:46:16.476565Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:16.476563Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings". 
Create session OK 2025-05-05T09:46:16.476566Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:16.476569Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:16.476570Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:16.485939Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:46:16.485955Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:46:16.497940Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:46:16.497956Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:46:16.503736Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:46:16.503753Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:46:16.517277Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:16.517279Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:46:16.517285Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:46:16.517295Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:46:16.517521Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:46:16.517530Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:46:16.517557Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:46:16.517565Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:46:16.517599Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:46:16.517601Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:46:16.517647Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:46:16.517654Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:46:16.517684Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:16.517690Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:46:16.517830Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:16.517835Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:46:16.517845Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:16.517851Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:46:16.517891Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:46:16.517898Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:46:16.517902Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:16.517903Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:46:16.518043Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:16.518052Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageModifyQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int8-pk_types21-all_types21-index21---] [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword [GOOD] >> 
TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_list_queue_tags[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_db_counters.py::TestKqpCounters::test_case [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> 
test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate >> test_db_counters.py::TestStorageCounters::test_storage_counters[disable_separate_quotas] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> docker_wrapper_test.py::test_pg_generated[Test64BitErrorChecking] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestAppendEncodedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestAppendEscapedTextExistingBuffer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayScanner] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestArrayValueBackend] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestArrayValuer] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSliceToInt] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBinaryByteSlicetoUUID] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBindError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestBoolArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteSliceToText] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanEmpty] 
[GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormatEncoding] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestByteaOutputFormats] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCloseBadConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommit] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransaction] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCommitInFailedTransactionWithCancelContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnExecDeadlock] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnListen] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.Background] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnPrepareContext/context.WithTimeout_exceeded] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlisten] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnUnlistenAll] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNoticeHandler_Simple] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestConnectorWithNotificationHandler_Simple] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelBegin] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestContextCancelQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyFromError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInBinaryError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInMultipleValues] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInRaiseStmtTrigger] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInSchemaStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyInStmtAffectedRows] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInTypes] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyInWrongType] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopyOutsideOfTxnError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestCopyRespLoopConnectionError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestCopySyntaxError] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestDataType] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeLength] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypeName] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDataTypePrecisionScale] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeBool] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBackend] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestDecodeUUIDBinaryError] [GOOD] >> 
docker_wrapper_test.py::test_pg_generated[TestEmptyQuery] [SKIPPED] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> docker_wrapper_test.py::test_pg_generated[TestEmptyResultSetColumns] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeAndParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestEncodeDecode] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorClass] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartup] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorDuringStartupClosesConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorOnQueryRowSimpleQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestErrorSQLState] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestExec] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayValue] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission >> docker_wrapper_test.py::test_pg_generated[TestFloat64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatAndParseTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestFormatTsBackend] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestFullParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanDelimiter] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerArrayString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanScannerSliceString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueErrors] 
[GOOD] >> docker_wrapper_test.py::test_pg_generated[TestGenericArrayValueUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestHasCorrectRootGroupPermissions] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIPv6LoopbackParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInfinityTimestamp] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanUnsupported] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt32ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInt64ArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestInvalidProtocolParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIsUTF8] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue1046] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue1062] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue186] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue196] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue282] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestIssue494] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestIssue617] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerConnCloseWhileQueryIsExecuting] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerFailedQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerListen] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerReconnect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlisten] [SKIPPED] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestListenerUnlistenAll] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestMinimalURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleEmptyResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleResult] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestMultipleSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Connect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_Driver] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNewConnector_WorksWithOpenDB] [SKIPPED] >> 
docker_wrapper_test.py::test_pg_generated[TestNewListenerConn] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNoData] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestNotifyExtra] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestNullAfterNonNull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestOpenURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParameterCountMismatch] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArray] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseArrayError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseComplete] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseEnviron] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseErrorInExtendedQuery] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestParseOpts] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTs] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestParseTsErrors] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPgpass] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestPing] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelRace] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryCancelledReused] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQueryRowBugWorkaround] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestQuickClose] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteIdentifier] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestQuoteLiteral] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReadFloatPrecision] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestReconnect] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestReturning] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestRowsCloseBeforeDone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsColumnTypes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestRowsResultTag] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestRuntimeParameters] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_passed_when_disabled] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_not_set_for_IPv4] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_passed_when_asked_for] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSNISupport/SNI_is_set_by_default] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSSLClientCertificates] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLConnection] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLRequireWithRootCert] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyCA] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestSSLVerifyFull] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestScanNilTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestScanTimestamp] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleParseURL] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist >> docker_wrapper_test.py::test_pg_generated[TestSimpleParseURL] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestSimpleQuery] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStatment] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext] [SKIPPED] >> 
docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.Background] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtExecContext/context.WithTimeout_exceeded] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.Background] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStmtQueryContext/context.WithTimeout_exceeded] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanBytes] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanEmpty] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanError] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanNil] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayScanUnsupported] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringArrayValue] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToBytea] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestStringWithNul] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToInt] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextByteSliceToUUID] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTextDecodeIntoString] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone] [GOOD] |86.7%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |86.7%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+00:00_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:00_=>_0000-01-01T11:59:59+04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59+04:01:02_=>_0000-01-01T11:59:59+04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/11:59:59-04:01:02_=>_0000-01-01T11:59:59-04:01] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00-04:00_=>_0000-01-02T00:00:00-04:00] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.0+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00:00.000000+00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithTimezone/24:00Z_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/11:59:59_=>_0000-01-01T11:59:59Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.000000_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00.0_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimeWithoutTimezone/24:00_=>_0000-01-02T00:00:00Z] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithOutTimezone] [GOOD] >> docker_wrapper_test.py::test_pg_generated[TestTimestampWithTimeZone] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestTxOptions] [SKIPPED] >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? 
I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:53 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:22 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? 
S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? 
S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:00 [ksoftirqd/25] root 169 0.0 ... ate table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:36.718265Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-05-05T09:46:36.718271Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:36.718272Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:36.718375Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks". Create session OK 2025-05-05T09:46:36.718383Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:46:36.718383Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:46:36.718504Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:46:36.718510Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:36.718511Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:36.718748Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-05-05T09:46:36.718759Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:36.718761Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:36.718763Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections". 
Create session OK 2025-05-05T09:46:36.718764Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:36.718765Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:36.719814Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:46:36.719823Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:36.719824Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:36.724929Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:46:36.724942Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:46:36.740275Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:46:36.740295Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:46:36.759313Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:46:36.759334Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-05-05T09:46:36.759366Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:46:36.759373Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:46:36.759595Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:46:36.759604Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:46:36.759665Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:46:36.759673Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:46:36.759678Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:46:36.759683Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:46:36.759728Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:46:36.759736Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:46:36.759789Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:46:36.759797Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:46:36.759801Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:46:36.759803Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:46:36.759840Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:46:36.759847Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:46:36.759870Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:46:36.759879Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:46:36.759910Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully 
created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:46:36.759917Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:46:36.759936Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:46:36.759944Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:46:36.759964Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:46:36.759970Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:46:37.045655Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "abra" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:664: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/py3test >> docker_wrapper_test.py::test_pg_generated[TestXactMultiStmt] [SKIPPED] Test command err: ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:92: ResourceWarning: unclosed image = _docker_build(_tests_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/postgres_integrations/library/pytest_integration.py:96: ResourceWarning: unclosed _run_tests_in_docker(image, env, exchange_folder, tests_result_folder) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_drain.py::TestHive::test_drain_tablets |86.8%| [TA] $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... 
results_accumulator.log} |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |86.9%| [TA] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/test-results/py3test/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Timestamp-pk_types34-all_types34-index34---] [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal229-pk_types26-all_types26-index26---] [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> 
test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_drain.py::TestHive::test_drain_tablets [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [FAIL] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, 
result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> 
test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=2995727) is multi-threaded, use of fork() may lead to deadlocks in the child. 
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback (this warning is repeated many more times) ...
ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback (this warning is repeated many more times)
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_public_api.py::TestExplain::test_explain_data_query >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_system_views.py::TestQueryMetricsUniqueQueries::test_case [GOOD] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo}
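Note: the ResourceWarning block above reports unclosed network resources created by the SQS test helpers at test_garbage_collection.py:37 (send_message) and :53 (delete_message), and the interpreter's own hint is to enable tracemalloc so each warning can also show where the leaked object was allocated. Below is a minimal sketch of how one might do that when re-running this suite locally; the PYTHONTRACEMALLOC variable and the tracemalloc/warnings calls are standard CPython, while the pytest invocation, the frame depth of 25, and placing the snippet in a conftest.py are illustrative assumptions, not something taken from this log.

    # Run the noisy module with allocation tracking enabled (hypothetical invocation):
    #   PYTHONTRACEMALLOC=25 python -m pytest ydb/tests/functional/sqs/common/test_garbage_collection.py
    #
    # Or enable it programmatically, e.g. from a local conftest.py:
    import tracemalloc
    import warnings

    tracemalloc.start(25)                              # record up to 25 frames per allocation
    warnings.simplefilter("always", ResourceWarning)   # report every occurrence, not just the first

With tracemalloc active, each ResourceWarning is followed by an "Object allocated at (most recent call last)" traceback, which points at the resource (typically a socket or HTTP connection) that send_message/delete_message opens but never closes.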
ydb/tests/functional/scheme_shard/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_create_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[select-reuse_named_node-default.txt-ForceBlocks] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_all_types-pk_types12-all_types12-index12---] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval-pk_types35-all_types35-index35---] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> test.py::test[select-reuse_named_node-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect-1000-ForceBlocks] |87.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in 
source column set.*] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test.py::test[select-unlabeled--Results] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test.py::test[select-sampleselect-1000-ForceBlocks] [GOOD] >> test.py::test[select-sampleselect-1000-Results] >> test.py::test[action-eval_for-default.txt-ForceBlocks] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533--Results] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[sampling-topsort-default.txt-Results] >> test.py::test[select-sampleselect-1000-Results] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-ForceBlocks] >> test_db_counters.py::TestStorageCounters::test_storage_counters[disable_separate_quotas] [GOOD] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> test_db_counters.py::TestStorageCounters::test_storage_counters[enable_separate_quotas] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test.py::test[action-eval_for-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[action-eval_for-default.txt-Results] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test.py::test[select-select_concrete_detailed_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test.py::test[join-lookupjoin_bug8533--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> test.py::test[action-eval_for-default.txt-Results] [GOOD] >> test.py::test[action-eval_sample--ForceBlocks] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> test.py::test[schema-append_to_desc_with_remap--Results] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail--ForceBlocks] >> 
test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test.py::test[lineage-select_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-extend-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort--Results] >> test.py::test[optimizers-unordered_over_sort--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test.py::test[simple_columns-simple_columns_base_fail--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_base_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-ForceBlocks] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> test_row_dispatcher.py::TestPqRowDispatcher::test_2_connection |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test.py::test[schema-append_to_desc_with_remap--Results] [GOOD] >> test.py::test[schema-insert-schema-Results] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[action-eval_sample--ForceBlocks] [GOOD] >> test.py::test[action-eval_sample--Results] >> test.py::test[join-lookupjoin_semi_2o--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] >> 
test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [GOOD] >> test.py::test[action-eval_sample--Results] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-ForceBlocks] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] [GOOD] >> test.py::test[tpch-q5-default.txt-ForceBlocks] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[schema-insert-schema-Results] [GOOD] >> test.py::test[select-braces-default.txt-Results] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-ForceBlocks] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_star--Results] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_both_sides--Results] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test.py::test[join-opt_on_opt_side-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi--Results] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] [GOOD] >> test.py::test[table_range-range_slash--ForceBlocks] >> test.py::test[action-process_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-list--ForceBlocks] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Int64-pk_types19-all_types19-index19---] [GOOD] >> test.py::test[select-braces-default.txt-Results] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> 
test.py::test[tpch-q5-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] >> test_copy_table.py::TestCopyTable::test_copy_table[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] [GOOD] >> test.py::test[select-literal_negative-default.txt-Results] >> test.py::test[table_range-range_slash--ForceBlocks] [GOOD] >> test.py::test[table_range-range_slash--Results] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] >> test.py::test[join-premap_common_semi--Results] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Results] [SKIPPED] >> test.py::test[join-premap_no_premap--Results] >> test.py::test[table_range-range_slash--Results] [GOOD] >> test.py::test[table_range-range_tables_with_view--ForceBlocks] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Decimal150-pk_types25-all_types25-index25---] [GOOD] >> test.py::test[aggr_factory-list--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-list--Results] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Results] >> test.py::test[select-literal_negative-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--Results] |87.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] [GOOD] >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> test.py::test[order_by-assume_with_filter--Results] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[type_v3-ignore_v3_hint--Results] [GOOD] >> test.py::test[view-file_outer--ForceBlocks] >> 
test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr >> test.py::test[aggr_factory-list--Results] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-ForceBlocks] >> test.py::test[table_range-range_tables_with_view--ForceBlocks] [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] >> test.py::test[view-file_outer--ForceBlocks] [GOOD] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--ForceBlocks] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[tpch-q15-default.txt-ForceBlocks] >> test.py::test[select-sampleselect--Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] [GOOD] >> test.py::test[order_by-assume_with_filter--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[order_by-order_by_tablerecord_column--Results] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] >> test.py::test[view-file_outer_library--ForceBlocks] [GOOD] >> test.py::test[view-file_outer_library--Results] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] >> test.py::test[select-to_dict-default.txt-Results] >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Interval64-pk_types39-all_types39-index39---] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |87.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[aggregate-avg_with_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [GOOD] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> 
test.py::test[select-trivial_having-default.txt-Results] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] |87.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> test.py::test[tpch-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> test.py::test[aggregate-avg_with_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_2_connection [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_inner--Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test.py::test[key_filter-complex-default.txt-Results] >> test.py::test[blocks-filter_partial_expr--ForceBlocks] >> test.py::test[hor_join-yql19332_aux_cols--ForceBlocks] >> test.py::test[select-to_dict-default.txt-Results] [GOOD] >> test.py::test[select-uncorrelated_subqueries--Results] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[tpch-q19-default.txt-ForceBlocks] >> test.py::test[join-premap_no_premap--Results] [GOOD] >> test.py::test[join-pullup_left_semi-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] [SKIPPED] >> test.py::test[join-star_join_multi--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--Results] >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Results] 
>> test.py::test[join-premap_merge_inner--Results] [GOOD] >> test.py::test[join-right_trivial--Results] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test.py::test[blocks-filter_partial_expr--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_partial_expr--Results] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |87.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test.py::test[select-uncorrelated_subqueries--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] >> test.py::test[hor_join-yql19332_aux_cols--ForceBlocks] [GOOD] >> test.py::test[hor_join-yql19332_aux_cols--Results] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] >> test.py::test[aggregate-group_by_expr_dict--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-ForceBlocks] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> test.py::test[blocks-pg_from_dates--ForceBlocks] >> test.py::test[tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_expr_bounds--ForceBlocks] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [GOOD] >> test.py::test[hor_join-yql19332_aux_cols--Results] [GOOD] >> test.py::test[insert-append_proto_fail--ForceBlocks] >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint32-pk_types23-all_types23-index23---] [GOOD] >> test.py::test[insert-append_proto_fail--ForceBlocks] [GOOD] >> test.py::test[insert-append_proto_fail--Results] [GOOD] >> test.py::test[insert-keepmeta-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-keepmeta-with_view-Results] >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> 
test.py::test[insert-keepmeta-with_view-Results] [SKIPPED] >> test.py::test[insert-part_sortness-desc-ForceBlocks] >> test.py::test[blocks-pg_from_dates--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_from_dates--Results] >> test.py::test[join-right_trivial--Results] [GOOD] >> test.py::test[join-three_equalities-off-Results] [SKIPPED] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[join-premap_common_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_map_cross--Results] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_with_mr [GOOD] >> test.py::test[blocks-pg_from_dates--Results] [GOOD] >> test.py::test[count-boolean_count--ForceBlocks] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] >> test.py::test[pg-join_using_tables3-default.txt-Results] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] >> test_public_api.py::TestSessionNotFound::test_session_not_found >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-fifo] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-ForceBlocks] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] >> test.py::test[window-win_expr_bounds--ForceBlocks] [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[window-win_expr_bounds--Results] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--Results] >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[insert-part_sortness-desc-ForceBlocks] [GOOD] >> test.py::test[insert-part_sortness-desc-Results] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> 
test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-insert_fill--Results] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] |87.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] >> test.py::test[window-win_expr_bounds--Results] [GOOD] >> test.py::test[window-win_func_into_udf--ForceBlocks] >> test.py::test[join-premap_map_cross--Results] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_map_inner--Results] >> test.py::test[join-star_join_multi--Results] [GOOD] >> test.py::test[join-star_join_multi-off-Results] [SKIPPED] >> test.py::test[join-two_aggrs-default.txt-Results] >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--ForceBlocks] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] |87.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |87.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] [GOOD] >> test.py::test[tpch-q12-default.txt-Results] >> test.py::test[count-boolean_count--ForceBlocks] [GOOD] >> test.py::test[count-boolean_count--Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field >> test.py::test[aggregate-group_by_gs_subselect-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] [GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint64-pk_types22-all_types22-index22---] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[std] >> test.py::test[count-boolean_count--Results] [GOOD] >> test.py::test[count-count_nullable--ForceBlocks] |88.0%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date-pk_types32-all_types32-index32---] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--Results] [GOOD] >> test.py::test[aggregate-group_by_session_star--Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |88.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store >> test.py::test[insert-select_operate_with_columns--ForceBlocks] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] >> test.py::test[pg-select_subquery-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-Results] >> test.py::test[join-premap_map_inner--Results] [GOOD] >> test.py::test[join-premap_merge_extrasort2-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber-off-Results] [SKIPPED] >> test.py::test[join-simple_columns_partial-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted--Results] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--ForceBlocks] >> test.py::test[window-win_func_into_udf--ForceBlocks] [GOOD] >> test.py::test[window-win_func_into_udf--Results] >> test.py::test[insert-select_operate_with_columns--Results] [GOOD] >> test.py::test[join-bush_in_in--ForceBlocks] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q17-default.txt-Results] >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] >> test.py::test[tpch-q12-default.txt-Results] [GOOD] >> test.py::test[tpch-q22-default.txt-Results] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test.py::test[join-two_aggrs-default.txt-Results] [GOOD] >> test.py::test[join-yql-12022--Results] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-fifo] >> 
test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [GOOD] >> test.py::test[count-count_nullable--ForceBlocks] [GOOD] >> test.py::test[count-count_nullable--Results] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> test.py::test[count-count_nullable--Results] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-ForceBlocks] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] >> test.py::test[join-bush_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in--Results] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] >> test.py::test[aggregate-group_by_session_star--Results] [GOOD] >> test.py::test[aggregate-having_cast-default.txt-Results] |88.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_DyNumber-pk_types28-all_types28-index28---] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted--Results] [GOOD] >> test.py::test[join-star_join_with_diff_complex_key--Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Date32-pk_types36-all_types36-index36---] [GOOD] >> test.py::test[join-yql-12022--Results] [GOOD] >> test.py::test[join-yql-14847--Results] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-std] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [GOOD] |88.1%| [TM] {default-linux-x86_64, pic, 
relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[tpch-q18-default.txt-Results] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_UUID-pk_types31-all_types31-index31---] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] >> test.py::test[distinct-distinct_one_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] >> test.py::test[join-bush_in_in--Results] [GOOD] >> test.py::test[join-grace_join1-map-ForceBlocks] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] [GOOD] >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test.py::test[distinct-distinct_one_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-ForceBlocks] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 09:47:48] "GET /mylib.sql HTTP/1.1" 200 - >> test.py::test[tpch-q22-default.txt-Results] [GOOD] >> test.py::test[tpch-q3-default.txt-Results] |88.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] >> test.py::test[aggregate-having_cast-default.txt-Results] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] >> test.py::test[join-yql-14847--Results] [GOOD] >> test.py::test[json-json_query/example--Results] >> test.py::test[join-grace_join1-map-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [GOOD] |88.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q39-default.txt-Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_tag_queue[tables_format_v1-std] [GOOD] >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf [GOOD] >> test.py::test[tpch-q18-default.txt-Results] [GOOD] >> test.py::test[type_v3-float--Results] >> test.py::test[join-inner_grouped_by_expr--ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-Results] >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-precompute_tree-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_inner_select_fail--ForceBlocks] >> 
test.py::test[tpch-q3-default.txt-Results] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime-pk_types33-all_types33-index33---] [GOOD] >> test.py::test[json-json_query/example--Results] [GOOD] >> test.py::test[key_filter-calc_dependent-default.txt-Results] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_when_deactivate_fat_tenant_creation_another_tenant_is_ok[enable_alter_database_create_hive_first--false] [GOOD] |88.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[tpch-q3-default.txt-Results] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> test.py::test[pg-tpcds-q39-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] >> test.py::test[expr-non_persistable_inner_select_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_inner_select_fail--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--ForceBlocks] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test.py::test[join-inner_grouped_by_expr--Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--ForceBlocks] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test.py::test[type_v3-float--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Uint8-pk_types24-all_types24-index24---] [GOOD] |88.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--Results] >> 
test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] [SKIPPED] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_ping.py::TestPing::test_error_on_cgi_parameters >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] >> test.py::test[key_filter-calc_dependent-default.txt-Results] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_Datetime64-pk_types37-all_types37-index37---] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[join-join_semi_correlation_in_order_by--ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Results] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test |88.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Results] >> 
test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] >> test.py::test[binding-table_regexp_strict_binding--Results] [GOOD] >> test.py::test[blocks-add_uint16--Results] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_ping.py::TestPing::test_error_on_cgi_parameters [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_2o-off-ForceBlocks] >> test_ping.py::TestPing::test_error_on_non_ping_path >> test_ping.py::TestPing::test_error_on_non_ping_path [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_multi_key--Results] >> test_ping.py::TestPing::test_ping |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_is_operational_with_distconf [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [GOOD] >> test_ping.py::TestPing::test_ping [GOOD] |88.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test.py::test[flatten_by-flatten_dict--Results] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-ForceBlocks] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test.py::test[type_v3-ignore_v3_pragma--Results] [GOOD] >> test.py::test[udf-python_script--Results] >> test.py::test[blocks-add_uint16--Results] [GOOD] >> test.py::test[blocks-add_uint32--Results] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] |88.4%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |88.4%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[join-lookupjoin_inner_2o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_empty_subq--ForceBlocks] >> test.py::test[key_filter-is_null_multi_key--Results] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter1--Results] [SKIPPED] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [GOOD] |88.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_invalid_tag_queue[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text 
'{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[insert-append_missing_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-append_missing_null-default.txt-Results] >> test.py::test[udf-python_script--Results] [GOOD] >> test.py::test[udf-udaf--Results] >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test.py::test[blocks-add_uint32--Results] [GOOD] >> test.py::test[blocks-block_input_mapreduce--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] >> test.py::test[blocks-block_output_various_types--Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg_filter--Results] >> test_db_counters.py::TestStorageCounters::test_storage_counters[enable_separate_quotas] [GOOD] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] >> test.py::test[insert-append_missing_null-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-ForceBlocks] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt-Results] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] >> 
test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |88.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--Results] [GOOD] >> test.py::test[blocks-compare_dates_floats_bools--Results] [SKIPPED] >> test.py::test[blocks-decimal_op_decimal--Results] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] >> test.py::test[udf-udaf--Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |88.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] |88.6%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[limit-limit_skip_take-default.txt-Results] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_3_sessions [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3054051) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_distconf.py::TestKiKiMRDistConfBasic::test_cluster_expand_with_distconf [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] [GOOD] >> test.py::test[view-file_inner_udf--Results] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns--Results] |88.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] >> test.py::test[blocks-decimal_op_decimal--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> test.py::test[join-opt_on_opt_side--Results] >> test.py::test[column_group-hint_anon-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> 
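The ResourceWarning entries above note that tracemalloc can be enabled to see where the unclosed objects were allocated. A minimal, generic sketch of doing that in a test process follows; it is not taken from the YDB test harness, just the standard-library mechanism the warning refers to.

import tracemalloc

# Start tracking allocations as early as possible (e.g. at interpreter or
# conftest import time). With this enabled, each ResourceWarning about an
# unclosed object also reports the traceback of its allocation site.
tracemalloc.start()

# Equivalent without code changes: run the interpreter with
#   PYTHONTRACEMALLOC=1 python -m pytest ...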
test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_ping.py::TestPing::test_ping [GOOD] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] >> test.py::test[insert-append_sorted-to_sorted_desc-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-Results] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> test.py::test[action-eval_column--Results] |88.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] [GOOD] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] |88.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] [GOOD] >> test.py::test[insert-append_with_read_udf_fail--ForceBlocks] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] [GOOD] >> test.py::test[blocks-filter_expr--Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] >> test.py::test[join-premap_common_inner--Results] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] >> test.py::test[insert-append_with_read_udf_fail--ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] |88.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/py3test >> test_copy_table.py::TestCopyTable::test_copy_table[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |88.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--false] [GOOD] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--true] |88.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[insert-append_with_read_udf_fail--ForceBlocks] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] [GOOD] |88.8%| [TA] $(B)/ydb/tests/datashard/copy_table/test-results/py3test/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/tests/datashard/copy_table/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |88.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test.py::test[blocks-filter_expr--Results] [GOOD] >> test.py::test[blocks-interval_add_interval--Results] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] [GOOD] >> test.py::test[window-distinct_over_window--Results] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-fifo] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-std] >> test.py::test[hor_join-skip_yamr--Results] >> test.py::test[join-premap_common_inner--Results] [GOOD] >> test.py::test[join-premap_context_dep-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2--Results] >> test.py::test[order_by-native_desc_publish--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is 
deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[order_by-native_desc_publish--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_publish--Results] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[pg-select_common_type_unionall--ForceBlocks] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v0-std] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] >> test.py::test[blocks-top_sort_two_mix--ForceBlocks] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test.py::test[pg-tpcds-q59-default.txt-Results] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[schema-copy-read_schema-ForceBlocks] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[hor_join-skip_yamr--Results] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-ForceBlocks] >> test.py::test[column_order-select_subquery-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] >> test.py::test[blocks-interval_add_interval--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] |88.9%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state >> test.py::test[join-premap_merge_extrasort2--Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off-Results] [SKIPPED] >> test.py::test[join-pullup_random--Results] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] |88.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] >> test.py::test[blocks-top_sort_two_mix--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[schema-copy-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-copy-read_schema-Results] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] [GOOD] >> test.py::test[count-count_all_grouped-empty-Results] >> test.py::test[pg-select_common_type_unionall--ForceBlocks] [GOOD] >> test.py::test[pg-select_common_type_unionall--Results] >> test.py::test[insert-select_after_insert_relabeled-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[count-count_all_grouped-empty-Results] [GOOD] >> 
test.py::test[dq-precompute_parallel_indep--ForceBlocks] >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[schema-insert-schema-ForceBlocks] >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> test.py::test[column_group-hint_non_lst_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_tmp-default.txt-ForceBlocks] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[blocks-pg_call--Results] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-ForceBlocks] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [GOOD] >> test.py::test[window-distinct_over_window--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Results] >> test.py::test[pg-tpcds-q64-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-ForceBlocks] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test.py::test[pg-tpcds-q84-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q88-default.txt-Results] >> test.py::test[schema-insert-schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert-schema-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test.py::test[blocks-pg_call--Results] [GOOD] >> 
test.py::test[insert-trivial_literals_multirow-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] >> test_dynamic_tenants.py::test_check_access[enable_alter_database_create_hive_first--true] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Results] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] |89.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[blocks-pg_call--Results] [GOOD] >> test.py::test[dq-precompute_parallel_indep--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel_indep--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] [GOOD] >> test.py::test[insert-yql-13083--ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test.py::test[schema-insert-schema-Results] [GOOD] >> test.py::test[schema-read_schema_other--ForceBlocks] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] >> test.py::test[column_order-insert_tmp-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-insert_tmp-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test.py::test[pg-select_subquery2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery2-default.txt-Results] 
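Aside on the DeprecationWarning repeated in the test stderr above: ydb/tests/library/sqs/requests_client.py:140 still calls logger.warn(...), and in the standard logging module Logger.warn is only a deprecated alias of Logger.warning with the same signature, so the fix is a rename at the call site. A minimal sketch of the corrected call follows; report_failed_request and its parameters are hypothetical stand-ins for the surrounding code, and only the message text is taken from the log.

    import logging

    logger = logging.getLogger(__name__)

    def report_failed_request(code, reason, text):
        # Old spelling (what the log shows, triggers the DeprecationWarning):
        #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))
        # Supported spelling, same arguments and behaviour:
        logger.warning(
            "Last request failed with code {}, reason '{}' and text '{}'".format(
                code, reason, text
            )
        )

The same rename is also a natural moment to switch to logging's lazy %-style formatting (logger.warning("... %s ...", code, reason, text)) so the message string is only built when the record is actually emitted.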
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test.py::test[pg-tpcds-q88-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test.py::test[column_order-insert_tmp-default.txt-Results] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test.py::test[action-nested_rewrite_io-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] |89.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test.py::test[join-pushdown_filter_over_left--Results] [GOOD] >> test.py::test[join-right_trivial-off-Results] [SKIPPED] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test.py::test[join-star_join--Results] >> test_public_api.py::TestBadSession::test_simple >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test.py::test[schema-read_schema_other--ForceBlocks] [GOOD] >> test.py::test[schema-read_schema_other--Results] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] >> test.py::test[pg-select_subquery2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-ForceBlocks] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] >> test.py::test[schema-read_schema_other--Results] [GOOD] >> test.py::test[schema-user_schema_override--ForceBlocks] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [GOOD] >> 
test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test.py::test[insert-yql-13083--ForceBlocks] [GOOD] >> test.py::test[insert-yql-13083--Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types_without_predicate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_tags.py::TestQueueTags::test_untag_queue[tables_format_v1-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] |89.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q08-default.txt-Results] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] >> test.py::test[column_order-select_win_func-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-Results] >> test.py::test[flatten_by-flatten_few_fields--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q19-default.txt-Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] >> test.py::test[insert-yql-13083--Results] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-ForceBlocks] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[pg-tpcds-q08-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_override--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_override--Results] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_const_interval--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] |89.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-ForceBlocks] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test 
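Aside: the same requests_client.py:140 DeprecationWarning appears in nearly every "Test command err" section of this run, which makes it easy to ignore. For hunting such call sites down locally, Python's standard warnings filter can promote DeprecationWarning to a hard error for a run. The sketch below is generic Python, not part of the ya tooling; the pytest path in the comment is only an illustrative guess at how one of these suites might be invoked outside CI.

    # One-off from the command line:
    #   python -W error::DeprecationWarning some_script.py
    #   pytest -W error::DeprecationWarning ydb/tests/functional/sqs/common   (illustrative path)
    # or programmatically, before the code under test is imported:
    import warnings

    warnings.simplefilter("error", DeprecationWarning)

    # Simulate a deprecated call site (in the log it is logging's deprecated
    # Logger.warn alias); with the filter above the warning is raised instead
    # of printed, so the run fails loudly at the offending line:
    try:
        warnings.warn("The 'warn' method is deprecated, use 'warning' instead",
                      DeprecationWarning)
    except DeprecationWarning as err:
        print("promoted to error:", err)

Promoting the warning to an error is usually a temporary, local measure; once the call sites are fixed the filter can be dropped again.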
|89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test.py::test[column_order-select_win_func-default.txt-Results] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher >> test.py::test[dq-precompute_asyncfile--ForceBlocks] >> test.py::test[join-star_join--Results] [GOOD] >> test.py::test[join-star_join_semionly-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left--Results] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] >> test.py::test[pg-tpcds-q31-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test.py::test[insert_monotonic-keep_meta-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test.py::test[pg-tpch-q19-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test.py::test[pg-tpcds-q31-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q66-default.txt-ForceBlocks] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> 
test.py::test[insert_monotonic-keep_meta-default.txt-Results] [GOOD] >> test.py::test[join-bush_dis_in--ForceBlocks] >> test.py::test[select-dict_lookup_by_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test.py::test[agg_apply-avg_const_interval--Results] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] [GOOD] >> test.py::test[produce-process_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-ForceBlocks] >> test.py::test[dq-precompute_asyncfile--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_asyncfile--Results] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_order_by_fail--ForceBlocks] >> data_correctness.py::TestDataCorrectness::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test.py::test[expr-non_persistable_order_by_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_order_by_fail--Results] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-ForceBlocks] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test.py::test[pg-tpcds-q66-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q66-default.txt-Results] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test.py::test[join-bush_dis_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in--Results] >> test.py::test[pg-tpcds-q66-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] >> test.py::test[pg-tpcds-q71-default.txt-ForceBlocks] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/typing.py:395: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/PyYAML/py3/yaml/error.py:6: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/protobuf/py3/google/protobuf/text_format.py:568: ResourceWarning: unclosed ResourceWarning: Enable 
tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] [GOOD] >> test.py::test[select-where_in-default.txt-ForceBlocks] >> test.py::test[hor_join-out_max_outtables-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] >> test.py::test[join-yql-14829_left--Results] [GOOD] >> test.py::test[join-yql-4275--Results] >> test_public_api.py::TestBadSession::test_simple [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test.py::test[pg-tpcds-q71-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q71-default.txt-Results] >> test.py::test[join-bush_dis_in--Results] [GOOD] >> test.py::test[join-bush_dis_in-off-ForceBlocks] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] [GOOD] >> test.py::test[in-in_noansi_join--ForceBlocks] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test.py::test[flatten_by-flatten_with_join--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] >> test.py::test[pg-tpcds-q71-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q88-default.txt-ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test.py::test[select-where_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-where_in-default.txt-Results] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test.py::test[select-where_in-default.txt-Results] [GOOD] >> test.py::test[select-where_with_lambda--ForceBlocks] >> test.py::test[join-yql-4275--Results] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Results] >> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [GOOD] >> test.py::test[join-bush_dis_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in-off-ForceBlocks] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--Results] >> test.py::test[pg-tpcds-q88-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q88-default.txt-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] [GOOD] >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] >> test.py::test[result_types-containers-default.txt-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test.py::test[flatten_by-flatten_with_resource--Results] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q88-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-ForceBlocks] >> test.py::test[select-where_with_lambda--ForceBlocks] [GOOD] >> test.py::test[select-where_with_lambda--Results] >> ttl_unavailable_s3.py::TestUnavailableS3::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test.py::test[join-bush_in_in-off-ForceBlocks] [GOOD] >> tier_delete.py::TestTierDelete::test_delete_s3_ttl >> test.py::test[join-bush_in_in-off-Results] [SKIPPED] >> test.py::test[join-count_bans-off-ForceBlocks] >> test.py::test[select-where_with_lambda--Results] [GOOD] >> test.py::test[table_range-range_over_filter_udf--ForceBlocks] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-fifo] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test.py::test[result_types-containers-default.txt-Results] [GOOD] >> test.py::test[sampling-sort-default.txt-Results] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> test.py::test[in-in_noansi_join--ForceBlocks] [GOOD] >> test.py::test[in-in_noansi_join--Results] >> test.py::test[pg-tpcds-q97-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] |89.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-select.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:1356: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:1356: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[table_range-range_over_filter_udf--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_filter_udf--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> 
test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] >> test.py::test[sampling-sort-default.txt-Results] [GOOD] >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test.py::test[join-count_bans-off-ForceBlocks] [GOOD] >> test.py::test[join-count_bans-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns2--ForceBlocks] >> test.py::test[table_range-range_over_filter_udf--Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [GOOD] |89.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] >> unstable_connection.py::TestUnstableConnection::test >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted_by_tuple--ForceBlocks] >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test.py::test[join-flatten_columns2--ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns2--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test.py::test[in-in_sorted_by_tuple--ForceBlocks] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test.py::test[join-flatten_columns2--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test.py::test[join-grace_join1-grace-ForceBlocks] >> test.py::test[table_range-table_funcs_expr--ForceBlocks] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |89.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[pg-tpch-q02-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] 
[GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[key_filter-datetime-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-14157--Results] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] |89.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test.py::test[join-grace_join1-grace-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] |89.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint-opt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_read_raw_format_with_row_dispatcher [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3124272) is multi-threaded, use of fork() may lead to deadlocks in the child. 
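Two warnings in this "Test command err" block deserve a note. The fork() message is recent CPython (3.12+) pointing out that multiprocessing's default fork start method was used from an already multi-threaded process, which can deadlock the child; the usual remedy is to opt into the spawn (or forkserver) start method. The "Enable tracemalloc" hints that follow are the standard way to make a ResourceWarning show where the unclosed object was allocated. The sketch below is generic Python illustrating both, independent of the ydb test harness; worker and the pool size are arbitrary.

    import multiprocessing as mp
    import tracemalloc

    def worker(n):
        return n * n

    if __name__ == "__main__":
        # Record allocation tracebacks so a ResourceWarning can report the line
        # that created an unclosed socket/file (equivalent to running with
        # PYTHONTRACEMALLOC=1 or python -X tracemalloc).
        tracemalloc.start()

        # Ask for the spawn start method explicitly instead of the default fork,
        # avoiding the "multi-threaded, use of fork() may lead to deadlocks in
        # the child" warning.
        ctx = mp.get_context("spawn")
        with ctx.Pool(2) as pool:
            print(pool.map(worker, range(4)))

In a test suite the same effect is typically achieved once at process start, or via the framework's configuration, rather than at each call site.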
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[pg-tpch-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q19-default.txt-ForceBlocks] >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] [GOOD] |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |89.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] |89.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[insert-double_append_to_anonymous--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] |89.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] [GOOD] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] >> test.py::test[pg-tpch-q19-default.txt-ForceBlocks] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> 
test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] |89.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[pg-tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-grouping_sets--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] [GOOD] >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. 
yielded = self.gen.throw(*exc_info) |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> 
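The tornado-4 DeprecationWarning above ("the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead", reported for gen.py:1064, i.e. the `yielded = self.gen.throw(*exc_info)` line) is Python 3.12 deprecating the three-argument generator throw() form. A self-contained sketch of the two spellings; this is an illustration of the language change, not a patch to the vendored tornado code:

    import sys

    def consumer():
        try:
            yield 1
        except ValueError as err:
            yield f"caught {err}"

    gen = consumer()
    next(gen)                      # advance to the first yield
    try:
        raise ValueError("boom")
    except ValueError:
        exc_info = sys.exc_info()  # (type, value, traceback), the shape tornado keeps around

    # Deprecated in Python 3.12:  gen.throw(*exc_info)
    # Single-argument form: pass the exception instance with its traceback attached.
    print(gen.throw(exc_info[1].with_traceback(exc_info[2])))  # -> caught boom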
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_tenants[enable_alter_database_create_hive_first--false] [GOOD] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[lineage-grouping_sets--Results] [GOOD] >> test.py::test[lineage-list_literal4-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |89.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> 
test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] |89.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |89.7%| [TA] $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] |89.8%| [TA] {RESULT} $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test.py::test[hor_join-merge_multiouts_reuse--Results] [SKIPPED] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--Results] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_actorsystem.py::TestWithStorageNodeWith32Cpu::test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> 
test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_select_limit_db_id.py::TestSelectLimitWithDbId::test_select_same_with_id[v1-mvp_external_ydb_endpoint0] [SKIPPED] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/py3test >> test_leader_start_inflight.py::TestSqsMultinodeCluster::test_limit_leader_start_inflight[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_non_optional_field [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3060552) is multi-threaded, use of fork() may lead to deadlocks in the child. 
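The DeprecationWarning quoted just above from contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66 is Python 3.12 warning that fork()-starting a child from an already multi-threaded process can deadlock the child. A minimal sketch of the usual mitigation, requesting the "spawn" start method; the worker function is a placeholder and not part of the YDB test harness:

    import multiprocessing as mp

    def worker(n):
        return n * n  # placeholder workload

    if __name__ == "__main__":
        # "spawn" starts a fresh interpreter instead of fork()ing the threaded parent,
        # which is the situation the DeprecationWarning is guarding against.
        ctx = mp.get_context("spawn")
        with ctx.Pool(processes=2) as pool:
            print(pool.map(worker, range(4)))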
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |89.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] |89.8%| [TA] $(B)/ydb/tests/functional/sqs/large/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |89.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/large/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test.py::test[aggregate-group_by_expr_dict--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Results] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-skip_yamr--Results] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_actorsystem.py::TestWithStorageNodeWith32Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test.py::test[hor_join-skip_yamr--Results] [GOOD] >> test.py::test[hor_join-table_record--Results] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[aggregate-group_by_expr_lookup--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [GOOD] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] |89.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test.py::test[hor_join-table_record--Results] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithStorageNodeWith33Cpu::test >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-aborted] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3214860) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith19Cpu::test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[in-in_enum_single0-default.txt-Results] [GOOD] >> test.py::test[insert-after_group_by-default.txt-Results] |90.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith19Cpu::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> test_actorsystem.py::TestWithStorageNodeWith33Cpu::test [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] Test command err: 
ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test.py::test[insert-after_group_by-default.txt-Results] [GOOD] >> test.py::test[insert-append--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_public_api.py::TestRecursiveCreation::test_mkdir >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] |90.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] 
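The recurring DeprecationWarning from ydb/tests/library/sqs/requests_client.py:140 ("The 'warn' method is deprecated, use 'warning' instead") points at the truncated `logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(` call echoed in these blocks. A hedged sketch of the rename; the logger name and argument values are placeholders for what the log truncates, and the switch to %-style lazy formatting is shown as an option rather than the project's convention:

    import logging

    logger = logging.getLogger("sqs.requests_client")  # logger name is an assumption

    code, reason, text = 500, "InternalError", "<response body>"  # placeholder values

    # Deprecated alias:
    #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))
    # Supported spelling, with formatting deferred to the logging framework:
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                   code, reason, text)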
Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test.py::test[insert-append--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith1Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/kpz1/0010d8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> test_actorsystem.py::TestWithStorageNodeWith34Cpu::test |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed 
pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-from_erasure_to_none--Results] |90.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_actorsystem.py::TestWithHybridNodeWith1Cpu::test [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test.py::test[insert-from_erasure_to_none--Results] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_actorsystem.py::TestWithComputeNodeWith7Cpu::test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestSessionPool [GOOD] |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |90.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith16Cpu::test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithStorageNodeWith34Cpu::test [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] [GOOD] >> test.py::test[insert-override-with_read_udf-Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_actorsystem.py::TestWithComputeNodeWith7Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith20Cpu::test |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] >> 
test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith16Cpu::test [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] |90.3%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} >> test_public_api.py::TestAttributes::test_create_table >> test.py::test[insert-override-with_read_udf-Results] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_actorsystem.py::TestWithHybridNodeWith20Cpu::test [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-select_subquery--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_actorsystem.py::TestWithStorageNodeWith35Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] >> test_actorsystem.py::TestWithComputeNodeWith8Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the 
object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |90.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> 
test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith17Cpu::test >> test.py::test[insert-select_subquery--Results] [GOOD] >> test.py::test[insert-yql-13083-existig-Results] >> test_sql.py::TestCanonicalFolder1::test_case[compute/scheduler.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_actorsystem.py::TestWithComputeNodeWith8Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_actorsystem.py::TestWithComputeNodeWith27Cpu::test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith21Cpu::test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith17Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] >> test.py::test[insert-yql-13083-existig-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--Results] >> test_sql.py::TestCanonicalFolder1::test_case[dt.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith21Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-plan] 
[GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery >> test_actorsystem.py::TestWithComputeNodeWith27Cpu::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith35Cpu::test [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_actorsystem.py::TestWithComputeNodeWith38Cpu::test >> test.py::test[insert_monotonic-from_empty--Results] [GOOD] >> test.py::test[insert_monotonic-overlaping_fail--Results] [SKIPPED] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_actorsystem.py::TestWithComputeNodeWith9Cpu::test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> test_actorsystem.py::TestWithHybridNodeWith22Cpu::test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_actorsystem.py::TestWithComputeNodeWith18Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith9Cpu::test [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test.py::test[insert_monotonic-truncate_and_append-default.txt-Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test.py::test[join-bush_in--Results] >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] >> test_public_api.py::TestDocApiTables::test_create_table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3305503) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith18Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_actorsystem.py::TestWithHybridNodeWith22Cpu::test [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith28Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/insert_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] |90.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> 
test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithHybridNodeWith35Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test.py::test[join-bush_in--Results] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith10Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:589: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:287: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d7f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the 
object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d7f/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk5/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback !!! simulating S3 hang up -- sending SIGSTOP !!! simulating S3 recovery -- sending SIGCONT contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3170064 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> test_actorsystem.py::TestWithComputeNodeWith38Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[join-bush_in--Results] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_actorsystem.py::TestWithHybridNodeWith2Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithComputeNodeWith19Cpu::test |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithComputeNodeWith28Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_actorsystem.py::TestWithHybridNodeWith10Cpu::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith2Cpu::test [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith32Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith19Cpu::test [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/botocore/py3/botocore/hooks.py:653: ResourceWarning: 
unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/botocore/py3/botocore/hooks.py:653: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith11Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[json/json_query.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[dynumber/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] >> test_actorsystem.py::TestWithHybridNodeWith11Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] >> test_actorsystem.py::TestWithHybridNodeWith35Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] |90.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[json/select_from_table.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[explain.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_actorsystem.py::TestWithHybridNodeWith30Cpu::test |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith1Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_actorsystem.py::TestWithComputeNodeWith39Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_actorsystem.py::TestWithComputeNodeWith32Cpu::test [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |90.6%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith29Cpu::test |90.6%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith23Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_lefsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith1Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[json/select_params.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_actorsystem.py::TestWithHybridNodeWith30Cpu::test [GOOD] >> test_dispatch.py::TestMapping::test_mapping >> test_sql.py::TestCanonicalFolder1::test_case[index/select_using_index_only.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] >> 
test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_actorsystem.py::TestWithHybridNodeWith36Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith29Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q22.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_on_top_of_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test |90.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith39Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_left_null.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith33Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to 
get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:677: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=f'{self.folder_id}_other') ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:683: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token='usr_alkoberkanavt_5', folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:689: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=False, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:695: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id=self.folder_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:703: ResourceWarning: unclosed self._sqs_api = self._create_api_for_user('ignored', raise_on_error=True, force_private=True, iam_token=self.iam_token, folder_id='FOLDER_alkonavt') ResourceWarning: Enable tracemalloc to get the object allocation traceback |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith36Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_actorsystem.py::TestWithHybridNodeWith31Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( 
ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith12Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[order_by/order_by_pk_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_aliases_and_apply.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith10Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, 
reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] >> data_correctness.py::TestDataCorrectness::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith10Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_bool.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_nonselector_aliases.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] |90.7%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> test_sql.py::TestCanonicalFolder1::test_case[simple/q4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] |90.7%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithHybridNodeWith36Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_predicate_right_2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_actorsystem.py::TestWithHybridNodeWith31Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith2Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith33Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith20Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith37Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_comparison_empty_string.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] >> 
test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_range_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith2Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith27Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith11Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_composite.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith30Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith11Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith32Cpu::test >> test_dispatch.py::TestMapping::test_mapping [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith3Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_right_key_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_in_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q10.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] >> test_retry.py::TestRetry::test_low_rate[kikimr0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith27Cpu::test [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith3Cpu::test [GOOD] >> test_dispatch.py::TestMapping::test_idle >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith37Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_rightsemi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q11.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith34Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client0-year Int32-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> test_actorsystem.py::TestWithComputeNodeWith4Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith37Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_actorsystem.py::TestWithStorageNodeWith12Cpu::test >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v1-client0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] >> 
test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_dependent_nopush.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client1-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> test_actorsystem.py::TestWithComputeNodeWith30Cpu::test [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases.sql-result_sets] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_actorsystem.py::TestWithHybridNodeWith32Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith4Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_simple_c.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith12Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith28Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_null_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q8.sql-result_sets] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client2-year Uint32-False] [GOOD] |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test_actorsystem.py::TestWithComputeNodeWith5Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-plan] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client3-year Uint32 NOT NULL-True] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-plan] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[udfs/math.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_using_index.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3338216) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith5Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith34Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client4-year Int64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_3.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith33Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith37Cpu::test [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith13Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/script_params.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith28Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q14.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_by_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_with_agg.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith38Cpu::test |90.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_point_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_4.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[table_types.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client6-year Uint64-False] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith13Cpu::test [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> test_s3_1.py::TestS3::test_write_result[v1-kikimr_params0-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d85/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d85/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3154625 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[simple/q15.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, 
reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client7-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith6Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_from_table.sql-result_sets] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_5.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client8-year String NOT NULL-True] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith33Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith29Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] |90.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead 
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/PyHamcrest/py3/hamcrest/core/base_description.py:43: DeprecationWarning: Call append_description_of instead of append_value ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d78/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d78/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk6/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3178466 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/delete_same.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q16.sql-result_sets] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client9-year String-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_public_metrics.py::TestPublicMetrics::test_public_metrics[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_public_metrics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3375214) is multi-threaded, use of fork() may lead to deadlocks 
in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3379636 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_literal.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] |90.9%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] |90.9%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> test_actorsystem.py::TestWithComputeNodeWith35Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith14Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_6.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_random_chars_ranges.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith38Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client10-year Utf8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] >> test_actorsystem.py::TestWithStorageNodeWith5Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client11-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] >> test_actorsystem.py::TestWithStorageNodeWith21Cpu::test >> 
test_sql.py::TestCanonicalFolder1::test_case[write/insert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] >> test_validation.py::TestS3::test_empty[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith14Cpu::test [GOOD] >> test_dispatch.py::TestMapping::test_idle [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q17.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith38Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_7.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client12-year Date-False] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_pk.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] >> test_actorsystem.py::TestWithStorageNodeWith29Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith5Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( library/python/pytest/plugins/ya.py:563: ResourceWarning: unclosed pyfuncitem.retval = testfunction(**testargs) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client13-year Date NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] >> test_sql.py::TestCanonicalFolder1::test_case[write/insert_revert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_equi_multi_rp_1.sql-result_sets] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith21Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q18.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] >> test_test_connection.py::TestConnection::test_test_s3_connection[v1-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_8.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3352495) is multi-threaded, use of fork() may lead to deadlocks in the child. 
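The multiprocessing warnings in the test_dispatch.py block above come from CPython itself (popen_fork.py / process.py): the test harness process is multi-threaded at the moment it forks children, and the unclosed-object ResourceWarnings suggest re-running with tracemalloc enabled (for example PYTHONTRACEMALLOC=1) to locate the allocation site. One way to avoid the fork() warning, sketched below under the assumption that the harness controls how its workers are started, is to request the "spawn" start method explicitly; the _worker function is hypothetical and only multiprocessing.get_context is taken from the standard library.

    # Hedged sketch: start children with "spawn" instead of fork() so the
    # "multi-threaded, use of fork() may lead to deadlocks" warning goes away.
    # The worker below is a placeholder, not the actual test harness code.
    import multiprocessing as mp

    def _worker(n: int) -> int:
        return n * n  # stand-in for whatever the child process does

    if __name__ == "__main__":
        ctx = mp.get_context("spawn")   # avoids os.fork() in a threaded parent
        with ctx.Pool(processes=2) as pool:
            print(pool.map(_worker, range(4)))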
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_nonkey_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] >> test_test_connection.py::TestConnection::test_test_s3_connection[v2-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v1-client0] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-false-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] >> test_test_connection.py::TestConnection::test_test_s3_connection_uri[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client14-year Datetime-False] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_multi_range_skip.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v1-client0] [GOOD] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith31Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead 
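The Flask/Werkzeug deprecation warnings in the nemesis block above (pkgutil.find_loader and ast.Str are both slated for removal in Python 3.14) are emitted by vendored third-party code rather than by the tests themselves; the stdlib replacements the warnings point to look roughly like the sketch below. The module name and the parsed snippet are made up for illustration.

    # Illustrative only: the replacements named by the warnings above.
    import ast
    import importlib.util

    # pkgutil.find_loader("pkg")  ->  importlib.util.find_spec("pkg")
    print(importlib.util.find_spec("json") is not None)

    # ast.Str / node.s  ->  ast.Constant / node.value
    for node in ast.walk(ast.parse("x = 'hello'")):
        if isinstance(node, ast.Constant) and isinstance(node.value, str):
            print(node.value)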
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_s3_1.py::TestS3::test_top_level_listing_2[v2-kikimr_params0-true-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings0-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-false-client15-year Datetime NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_usage_key.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_actorsystem.py::TestWithComputeNodeWith35Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q19.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_9.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_actorsystem.py::TestWithStorageNodeWith2Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith6Cpu::test >> test_actorsystem.py::TestWithHybridNodeWith38Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client0-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-false-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] >> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_actorsystem.py::TestWithStorageNodeWith22Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[json/insert_params.sql-plan] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith2Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/multi_write.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_actorsystem.py::TestWithStorageNodeWith6Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[range_skip_take.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client1-year Int32 NOT NULL-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] 
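The DeprecationWarning repeated throughout the SQS test command errors above points at a logger.warn(...) call in ydb/tests/library/sqs/requests_client.py:140; Logger.warn is a deprecated alias of Logger.warning. A minimal sketch of the rename follows; the variable names are assumptions rather than the actual requests_client.py code, and lazy %-style arguments are used so the message is only formatted when the level is enabled.

    # Hedged sketch of the logger.warn -> logger.warning rename suggested by
    # the warning text; code/reason/text are illustrative placeholders.
    import logging

    logger = logging.getLogger("sqs.requests_client")
    code, reason, text = 400, "Bad Request", "malformed query"

    # Before (emits "The 'warn' method is deprecated, use 'warning' instead"):
    # logger.warn("Last request failed with code {}, reason '{}' and text '{}'"
    #             .format(code, reason, text))

    # After:
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                   code, reason, text)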
>> test_s3_1.py::TestS3::test_top_level_listing_2[v1-kikimr_params0-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] >> test_actorsystem.py::TestWithStorageNodeWith7Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index/topsort_index_with_selector_aliases_2.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_actorsystem.py::TestWithStorageNodeWith30Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client2-year Uint32-False] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT NULL-True] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith22Cpu::test [GOOD] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-False-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/replace.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client3-year Uint32 NOT 
NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[select_result_limit.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q20.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith39Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_range_single_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] >> test_actorsystem.py::TestWithStorageNodeWith7Cpu::test [GOOD] |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith15Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[index_predicate_point.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] >> test_yq_v2.py::TestS3::test_yqv2_enabled[v2-True-client0] [GOOD] >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[simple/ct.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client4-year Int64-False] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> TStateStorageTest::ShouldCountStates [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint >> test_sql.py::TestCanonicalFolder1::test_case[join/group_by_lookup.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith36Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] |91.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[topsort/topsort_pk.sql-plan] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_test_connection.py::TestConnection::test_test_s3_connection_error[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_test_connection/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3419460) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3423484 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings0-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-false-client0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_actorsystem.py::TestWithHybridNodeWith39Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-05-05T09:52:37.817796Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083] Count 
graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-05-05T09:52:37.879829Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-05-05T09:52:38.005854Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-05-05T09:52:39.570965Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-05-05T09:52:39.623462Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-05-05T09:52:39.623493Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_dict_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithStorageNodeWith30Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_yq_v2.py::TestS3::test_removed_database_path[v2-client0] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith23Cpu::test >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client6-year Uint64-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] >> 
test_sql.py::TestCanonicalFolder1::test_case[join/join_double_lookup.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] >> test_sql.py::TestCanonicalFolder1::test_case[write/update_on.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithStorageNodeWith8Cpu::test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> test_actorsystem.py::TestWithHybridNodeWith4Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client7-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_list_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-false-client0] [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] >> test_actorsystem.py::TestWithStorageNodeWith8Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client8-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] >> test_actorsystem.py::TestWithHybridNodeWith4Cpu::test [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith23Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_dup_column_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith34Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_explicit_partitioning_0.py::TestS3::test_partitioned_by[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] >> test_actorsystem.py::TestWithStorageNodeWith39Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_precompute[v2-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith10Cpu::test >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[simple/empty_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client9-year String-False] [GOOD] >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-false-client0] [GOOD] >> 
test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings0-client0] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> tier_delete.py::TestTierDelete::test_delete_s3_ttl [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d7c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d7c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {}, portions: 0 rows by tier: {'__DEFAULT': 100000}, portions: 2 contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3171560 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_actorsystem.py::TestWithComputeNodeWith36Cpu::test [GOOD] >> 
test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings0-client0] [SKIPPED] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged >> test_sql.py::TestCanonicalFolder1::test_case[join/join_range_right.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[write/upsert_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/q21.sql-plan] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith10Cpu::test [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation >> test_s3_1.py::TestS3::test_precompute[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection[v2-true-client0] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-3.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-plan] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client10-year Utf8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-plan] [GOOD] >> 
test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/multi_select.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithHybridNodeWith39Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-false-client0] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join1.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_s3_1.py::TestS3::test_precompute[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:109: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:751: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:748: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead 
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:755: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing/rules.py:756: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith5Cpu::test >> test_actorsystem.py::TestWithStorageNodeWith24Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup.sql-result_sets] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client11-year Utf8 NOT NULL-True] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] |91.2%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] |91.2%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3355104) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection[v1-true-client0] [GOOD] |91.2%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2025-05-05T09:52:54.227002Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500897416751015941:2048] with connection to localhost:14308:local 2025-05-05T09:52:54.227055Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:54.392355Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:52:54.392385Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:52:54.393313Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:52:54.513260Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:52:54.513273Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:52:54.513396Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:54.545979Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T09:52:54.545994Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 
2025-05-05T09:52:54.547226Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T09:52:54.570741Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T09:52:54.570759Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T09:52:54.997776Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500897419736480338:2048] with connection to localhost:14308:local 2025-05-05T09:52:54.997819Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:55.028910Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:52:55.028929Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:52:55.029072Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T09:52:55.077999Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-05-05T09:52:55.078018Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-05-05T09:52:55.504163Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500897423395075097:2048] with connection to localhost:14308:local 2025-05-05T09:52:55.504226Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:55.545416Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:52:55.545440Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:52:55.546231Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-05-05T09:52:55.586523Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-05-05T09:52:55.586543Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-05-05T09:52:56.051302Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500897427384340583:2048] with connection to localhost:14308:local 2025-05-05T09:52:56.051364Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:56.096996Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:52:56.097018Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:52:56.097267Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:52:56.225272Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:52:56.225293Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:52:56.225532Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T09:52:56.267433Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080 2025-05-05T09:52:56.267454Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-05-05T09:52:56.563333Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7500897428976358053:2048] with connection to localhost:14308:local 2025-05-05T09:52:56.563419Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:56.601781Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:52:56.601804Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:52:56.606989Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:52:56.775036Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:52:56.775047Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:52:56.775209Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:52:56.859063Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T09:52:56.859077Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:52:56.859262Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T09:52:56.898036Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T09:52:56.898051Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T09:52:56.899119Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T09:52:56.923498Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T09:52:56.923512Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse |91.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith5Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-false-client0] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client12-year Date-False] [GOOD] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered >> test_actorsystem.py::TestWithHybridNodeWith13Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith11Cpu::test >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client13-year Date NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_yq_v2.py::TestS3::test_query_parameters[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd0/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = 
subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd0/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_yq_v2/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3430588) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3434675 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_actorsystem.py::TestWithStorageNodeWith24Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v2-true-client0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith21Cpu::test >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_actorsystem.py::TestWithHybridNodeWith6Cpu::test >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> test_explicit_partitioning_0.py::TestS3::test_pruning[v2-true-client0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith9Cpu::test [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client14-year Datetime-False] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] 
>> test_actorsystem.py::TestWithHybridNodeWith13Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith11Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_cast2.sql-result_sets] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith6Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2025-05-05T09:53:01.291308Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500897450730587206:2048] with connection to localhost:2699:local 2025-05-05T09:53:01.291354Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:01.563274Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:01.563288Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:01.566993Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:01.587872Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2025-05-05T09:53:01.587886Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:02.074404Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500897449878188775:2048] with connection to localhost:2699:local 2025-05-05T09:53:02.074450Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:02.187136Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T09:53:02.187159Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:02.596139Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500897455726989778:2048] with connection to localhost:2699:local 2025-05-05T09:53:02.596216Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:02.635946Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:02.635965Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:02.636169Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:02.751560Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:02.751590Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:02.751813Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:02.783509Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Constraint violated. Table: `local/TStorageServiceTestShouldNotCreateCheckpointTwice/checkpoints_metadata`., code: 2012
: Error: Conflict with existing key., code: 2012 2025-05-05T09:53:02.783531Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:03.081143Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500897453410243803:2048] with connection to localhost:2699:local 2025-05-05T09:53:03.081187Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:03.112139Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:03.112154Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:03.112263Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:03.166809Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-05-05T09:53:03.166829Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:03.411987Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7500897458288102537:2048] with connection to localhost:2699:local 2025-05-05T09:53:03.412035Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:03.440607Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:03.440625Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:03.440787Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:03.572033Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:03.572051Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:03.574470Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:03.612524Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T09:53:03.612538Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:03.612721Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:03.639436Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-05-05T09:53:03.639452Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v2-true-client15-year Datetime NOT NULL-True] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] >> test_explicit_partitioning_0.py::TestS3::test_pruning[v1-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_failed_precompute[v1-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] >> test_explicit_partitioning_0.py::TestS3::test_validation[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] >> test_actorsystem.py::TestWithComputeNodeWith21Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] >> test_explicit_partitioning_0.py::TestS3::test_validation[v1-client0] [GOOD] >> test_inflight.py::TestS3::test_data_inflight[v1-client0-kikimr_params0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-strings.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith31Cpu::test [GOOD] >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> test_actorsystem.py::TestWithStorageNodeWith3Cpu::test >> 
test_actorsystem.py::TestWithHybridNodeWith3Cpu::test >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_dup_c_left.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client0-year Int32-False] [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[write/write_group_by.script-script] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith25Cpu::test >> test_s3_1.py::TestS3::test_failed_precompute[v1-true-client0] [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> test_s3_1.py::TestS3::test_missed[v2-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-false-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> test_actorsystem.py::TestWithStorageNodeWith3Cpu::test [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client1-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] >> test_actorsystem.py::TestWithHybridNodeWith14Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith12Cpu::test |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] 
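The DeprecationWarning repeated in the test command output above comes from ydb/tests/library/sqs/requests_client.py:140, which calls the deprecated logging alias logger.warn. A minimal sketch of the fix the warning itself suggests; the surrounding code of requests_client.py is not part of this log, so the function and variable names below are assumptions:

    import logging

    logger = logging.getLogger(__name__)

    def log_failed_request(code, reason, text):
        # Deprecated alias that triggers the warning seen above:
        #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))
        # Preferred spelling; lazy %-formatting also defers string building until the record is emitted:
        logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)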
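Several warnings in the checkpoint-storage test output above report code 400130 with messages of the form "current generation: 18, expected/new generation: 17, operation: Check": the storage proxy refuses requests whose coordinator generation does not match the one recorded in the coordinators_sync table. The YDB implementation is not shown in this log; the guard below is only a hypothetical illustration of that kind of generation check:

    # Hypothetical illustration, not the YDB checkpoint-storage code.
    def check_coordinator_generation(stored_generation: int, request_generation: int) -> None:
        # In the warnings above the Check operation fails both when the stored generation is
        # newer (18 vs 17) and when the graph was never registered (0 vs 17), so this sketch
        # requires an exact match between the stored and the requested generation.
        if request_generation != stored_generation:
            raise RuntimeError(
                "current generation: {}, expected/new generation: {}, operation: Check, code: 400130"
                .format(stored_generation, request_generation)
            )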
|91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[simple/null_select.sql-plan] [GOOD] >> test_s3_1.py::TestS3::test_missed[v2-false-client0] [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> test_s3_1.py::TestS3::test_missed[v2-true-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_equi.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> TStorageServiceTest::ShouldCreateCheckpoint >> overlapping_portions.py::TestOverlappingPortions::test >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v2-true-client0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith7Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints |91.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-window.test] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client2-year Uint32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] >> TStorageServiceTest::ShouldGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] >> test_actorsystem.py::TestWithComputeNodeWith12Cpu::test [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] >> test_actorsystem.py::TestWithHybridNodeWith14Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_missed[v2-true-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v1-kikimr_settings1-client0] [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] >> test_actorsystem.py::TestWithComputeNodeWith22Cpu::test >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] >> test_actorsystem.py::TestWithStorageNodeWith25Cpu::test [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState >> test_actorsystem.py::TestWithHybridNodeWith24Cpu::test |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith3Cpu::test [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client3-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-false-client0] [GOOD] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join2.test] [GOOD] >> TStorageServiceTest::ShouldGetState [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] >> test_actorsystem.py::TestWithHybridNodeWith7Cpu::test [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-12.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_inner.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> test_s3_1.py::TestS3::test_missed[v1-false-client0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithComputeNodeWith13Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request 
failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client4-year Int64-False] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.lz4-lz4] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_schema_columns_except_partitioning_ones[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2025-05-05T09:53:12.580937Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500897500240999820:2048] with connection to localhost:16549:local 2025-05-05T09:53:12.580995Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:12.615862Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:12.615887Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:12.616062Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:12.738134Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:12.738153Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:13.184839Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500897500224440687:2048] with connection to localhost:16549:local 2025-05-05T09:53:13.184890Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:13.235002Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:13.235028Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:13.235293Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:13.346135Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:13.346150Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:13.346571Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T09:53:13.371050Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T09:53:13.371074Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 
2025-05-05T09:53:13.371205Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-05-05T09:53:13.400176Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-05-05T09:53:13.400194Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-05-05T09:53:13.401057Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T09:53:13.432676Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T09:53:13.904805Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500897504666075499:2048] with connection to localhost:16549:local 2025-05-05T09:53:13.904849Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:13.936052Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:13.936072Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:13.936233Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:14.074834Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:14.074851Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:14.075321Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:14.141000Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T09:53:14.141024Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:14.141608Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T09:53:14.171502Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T09:53:14.171521Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T09:53:14.172020Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:14.196889Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-05-05T09:53:14.196911Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:14.197224Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-05-05T09:53:14.222524Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-05-05T09:53:14.222542Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-05-05T09:53:14.222845Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-05-05T09:53:14.253540Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2025-05-05T09:53:14.253563Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-05-05T09:53:14.258991Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 
2025-05-05T09:53:14.283020Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2025-05-05T09:53:14.283038Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2025-05-05T09:53:14.290984Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T09:53:14.338582Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T09:53:14.819889Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500897508816189466:2048] with connection to localhost:16549:local 2025-05-05T09:53:14.819946Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:14.849577Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:14.849594Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:14.849744Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:14.964615Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:14.964628Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:14.965669Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-05-05T09:53:14.981942Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-05-05T09:53:14.982007Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-05-05T09:53:14.983085Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2025-05-05T09:53:14.983101Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2025-05-05T09:53:15.057897Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2025-05-05T09:53:15.057933Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2025-05-05T09:53:15.057944Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2025-05-05T09:53:15.060478Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2025-05-05T09:53:15.119379Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2025-05-05T09:53:15.119415Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2025-05-05T09:53:15.120057Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> test_formats.py::TestS3Formats::test_format[v2-test.csv-csv_with_names-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_s3_1.py::TestS3::test_missed[v1-true-client0] [GOOD] >> 
test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-false-client0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith22Cpu::test [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] >> TStorageServiceTest::ShouldRegister >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftonly.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith13Cpu::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_actorsystem.py::TestWithHybridNodeWith24Cpu::test [GOOD] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_int_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataset] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.tsv-tsv_with_names-kikimr_settings0] [GOOD] |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint >> test_actorsystem.py::TestWithHybridNodeWith15Cpu::test >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.bz2-bzip2] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test [GOOD] >> 
TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TStorageServiceTest::ShouldSaveState >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] >> TStateStorageTest::ShouldDeleteNoCheckpoints >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.zst-zstd] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] >> test_formats.py::TestS3Formats::test_format[v2-test_each_row.json-json_each_row-kikimr_settings0] [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client6-year Uint64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints >> test_s3_1.py::TestS3::test_simple_hits_47[v2-true-client0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] >> test_actorsystem.py::TestWithHybridNodeWith8Cpu::test >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_leftsemi.sql-result_sets] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-false-client0] [GOOD] >> TStorageServiceTest::ShouldUseGc [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> TStateStorageTest::ShouldGetMultipleStates >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_actorsystem.py::TestWithHybridNodeWith15Cpu::test [GOOD] >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-false-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client7-year Uint64 NOT NULL-False] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v2-test.json.xz-xz] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_formats.py::TestS3Formats::test_format[v2-test_list.json-json_list-kikimr_settings0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT 
NULL-True] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_date[v1-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] >> test_actorsystem.py::TestWithHybridNodeWith8Cpu::test [GOOD] >> test_bindings_1.py::TestBindings::test_s3_insert[v2-kikimr_settings0-client0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2025-05-05T09:53:19.755446Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7500897528032747431:2048] with connection to localhost:26850:local 2025-05-05T09:53:19.755514Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:19.939831Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:19.939851Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:20.262815Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7500897532314737048:2048] with connection to localhost:26850:local 2025-05-05T09:53:20.262868Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:20.297266Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:20.297285Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:20.299401Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:20.334902Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-05-05T09:53:20.334943Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:20.335132Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:20.361985Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-05-05T09:53:20.362003Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:20.751171Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7500897533464385364:2048] with connection to localhost:26850:local 2025-05-05T09:53:20.751245Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:20.784363Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:20.784385Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:20.784518Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:20.906528Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:20.906545Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:20.906740Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:20.971428Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T09:53:20.971446Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:20.971651Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T09:53:20.998346Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T09:53:20.998365Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T09:53:20.998632Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:21.022451Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-05-05T09:53:21.022478Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:21.022669Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-05-05T09:53:21.050797Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-05-05T09:53:21.050816Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-05-05T09:53:21.051087Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T09:53:21.091876Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T09:53:21.418418Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7500897537212069944:2048] with connection to localhost:26850:local 2025-05-05T09:53:21.418472Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:21.448725Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:21.448745Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2025-05-05T09:53:21.454296Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:21.588590Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:21.588620Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:21.594981Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-05-05T09:53:21.614014Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-05-05T09:53:21.614060Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-05-05T09:53:22.038605Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7500897537851221110:2048] with connection to localhost:26850:local 2025-05-05T09:53:22.038624Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7500897542146188507:2130] 2025-05-05T09:53:22.038656Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-05-05T09:53:22.075125Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-05-05T09:53:22.075143Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-05-05T09:53:22.075345Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-05-05T09:53:22.200635Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-05-05T09:53:22.200652Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-05-05T09:53:22.200870Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:22.271138Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-05-05T09:53:22.271152Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:22.273281Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-05-05T09:53:22.299526Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-05-05T09:53:22.299544Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-05-05T09:53:22.299554Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-05-05T09:53:22.299658Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-05-05T09:53:22.299783Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-05-05T09:53:22.329724Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-05-05T09:53:22.329741Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-05-05T09:53:22.329889Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:22.352372Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-05-05T09:53:22.352401Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:22.352554Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-05-05T09:53:22.376239Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-05-05T09:53:22.376261Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-05-05T09:53:22.376274Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-05-05T09:53:22.376309Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-05-05T09:53:22.376383Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-05-05T09:53:22.377272Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-05-05T09:53:22.382818Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-05-05T09:53:22.413993Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-05-05T09:53:22.414011Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-05-05T09:53:22.414971Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-05-05T09:53:22.447035Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-05-05T09:53:22.447053Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-05-05T09:53:22.448747Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-05-05T09:53:22.475032Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-05-05T09:53:22.475052Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-05-05T09:53:22.475068Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-05-05T09:53:22.475110Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-05-05T09:53:22.475274Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T09:53:22.485229Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-05-05T09:53:22.506199Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T09:53:22.606549Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T09:53:22.609697Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-05-05T09:53:22.710065Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-05-05T09:53:22.712957Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> 
test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_validate_columns[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client8-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith23Cpu::test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |91.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_formats.py::TestS3Formats::test_format[v2-test.parquet-parquet-kikimr_settings0] [GOOD] >> test_s3_1.py::TestS3::test_simple_hits_47[v1-true-client0] [GOOD] >> test_bindings_0.py::TestBindings::test_binding_operations[v2-kikimr_settings1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] >> test_actorsystem.py::TestWithHybridNodeWith16Cpu::test >> test_actorsystem.py::TestWithComputeNodeWith14Cpu::test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith25Cpu::test >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.gz-gzip] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-false-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_early_finish.py::TestEarlyFinish::test_early_finish[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cc0/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object 
allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cc0/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_early_finish/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3469185) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3473415 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_ranges_rp_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith4Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client9-year String-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> test_formats.py::TestS3Formats::test_format[v1-test.csv-csv_with_names-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.lz4-lz4] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] >> test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] >> test_validation.py::TestS3::test_empty[v2-client0] [GOOD] >> test_validation.py::TestS3::test_empty[v1-client0] >> test_actorsystem.py::TestWithHybridNodeWith16Cpu::test [GOOD] >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_actorsystem.py::TestWithComputeNodeWith14Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-exact_file-False-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test.tsv-tsv_with_names-kikimr_settings0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] >> 
test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] >> test_bindings_1.py::TestBindings::test_s3_insert[v1-kikimr_settings0-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client10-year Utf8-False] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] >> test_s3_0.py::TestS3::test_csv[v2-false-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] >> test_formats.py::TestS3Formats::test_format[v1-test_each_row.json-json_each_row-kikimr_settings0] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] >> test_actorsystem.py::TestWithComputeNodeWith23Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_s3_0.py::TestS3::test_csv[v2-true-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse.sql-result_sets] [GOOD] >> test_formats.py::TestS3Formats::test_format[v1-test_list.json-json_list-kikimr_settings0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataset] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-True-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client11-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] >> test_actorsystem.py::TestWithHybridNodeWith25Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataにちは% set] >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.bz2-bzip2] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] >> test_formats.py::TestS3Formats::test_format[v1-test.parquet-parquet-kikimr_settings0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_no_paritioning_columns[v1-true-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year 
Int32 NOT NULL-True] >> TCheckpointStorageTest::ShouldCreateCheckpoint >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.zst-zstd] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith26Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-false-directory_scan-False-client0] [GOOD] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith37Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] >> test_s3_0.py::TestS3::test_csv[v1-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client12-year Date-False] [GOOD] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD] >> TCheckpointStorageTest::ShouldDeleteGraph >> test_insert.py::TestS3::test_insert[v2-client0-json_list-dataにちは% set] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client0-year Int32 NOT NULL-True] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataset] >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test [GOOD] >> test_compressions.py::TestS3Compressions::test_compression[v1-test.json.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] >> TCheckpointStorageTest::ShouldDeleteGraph [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v1-kikimr_settings1-client0] [SKIPPED] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] >> test_bindings_0.py::TestBindings::test_modify_connection_with_a_lot_of_bindings[v2-kikimr_settings1-client0] [SKIPPED] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_reverse_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] >> test_s3_0.py::TestS3::test_csv[v1-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference[v2-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client13-year Date NOT NULL-True] [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.gz-gzip] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_actorsystem.py::TestWithHybridNodeWith17Cpu::test >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client1-year Uint32 NOT NULL-True] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-exact_file-False-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] >> test_s3_0.py::TestS3::test_inference[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataにちは% set] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. 
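The DeprecationWarning above from tornado-4's gen.py:1064 is Python 3.12 deprecating the three-argument (type, exc, tb) form of generator throw(); the warning itself recommends the single-argument call. A minimal sketch of the two spellings, assuming nothing about the tornado code beyond the call it makes:

    # Minimal sketch, not the tornado-4 code: Python 3.12+ deprecates the
    # three-argument form of generator.throw() in favour of passing the exception.
    def consumer():
        try:
            yield 1
        except ValueError as err:
            print("caught:", err)
            yield 2

    g = consumer()
    next(g)                      # advance to the first yield
    exc = ValueError("boom")

    # Deprecated signature (the shape gen.py:1064 still uses):
    #   g.throw(type(exc), exc, exc.__traceback__)
    # Single-argument signature recommended by the warning:
    g.throw(exc)                 # the generator catches it and yields 2

The same deprecation applies to coroutine throw(), so the warning goes away once tornado-4 is patched or upgraded.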
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client2-year Uint64 NOT NULL-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_actorsystem.py::TestWithComputeNodeWith24Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client14-year Datetime-False] [GOOD] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.lz4-lz4] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith9Cpu::test [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] >> test_s3_0.py::TestS3::test_inference_null_column[v2-client0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith26Cpu::test >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_subquery.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> test_actorsystem.py::TestWithHybridNodeWith17Cpu::test [GOOD] >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-false-client15-year Datetime NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[join/join_to_idx_lookup_partial_inner.sql-result_sets] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client3-year Date NOT NULL-False] [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] >> test_insert.py::TestS3::test_insert[v2-client0-json_each_row-dataにちは% set] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataset] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-8.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v2-true-directory_scan-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] >> test_s3_0.py::TestS3::test_inference_optional_types[v2-client0] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client0-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client4-year String NOT NULL-True] [GOOD] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.bz2-bzip2] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] >> test_s3_0.py::TestS3::test_inference_multiple_files[v2-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith15Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataset] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-exact_file-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] >> test_actorsystem.py::TestWithComputeNodeWith24Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataにちは% set] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [GOOD] 
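The SQS test-command errors above keep flagging the same line, ydb/tests/library/sqs/requests_client.py:140, where Logger.warn (a long-deprecated alias of Logger.warning) is called. A one-line sketch of the fix, with the logger name and values assumed purely for illustration:

    import logging

    logger = logging.getLogger("requests_client")   # name assumed for illustration
    code, reason, text = 400, "Bad Request", "..."  # placeholder values

    # Deprecated spelling reported at requests_client.py:140:
    #   logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

    # Same message without the DeprecationWarning; %-style lazy formatting also
    # skips building the string when WARNING is not enabled for this logger.
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)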
|91.6%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |91.6%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> TStateStorageTest::ShouldSaveGetOldSmallState >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pk_predicate/pk_predicate_utf8.sql-result_sets] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.csv-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> test_actorsystem.py::TestWithHybridNodeWith26Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client1-year Int32 NOT NULL-False] [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] >> TStateStorageTest::ShouldSaveGetIncrementSmallState >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.zst-zstd] [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] >> test_format_setting.py::TestS3::test_interval_unit[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client5-year String-False] [GOOD] >> test_s3_0.py::TestS3::test_inference_file_error[v2-client0] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_formats.py::TestS3Formats::test_format_inference[v2-test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] >> test_insert.py::TestS3::test_insert[v2-client0-csv_with_names-dataにちは% set] [GOOD] >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-True-client0] [GOOD] >> 
test_insert.py::TestS3::test_insert[v2-client0-parquet-dataset] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-4.test] [GOOD] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v1-client0] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] >> test_compressions.py::TestS3Compressions::test_compression_inference[v2-test.csv.xz-xz] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client2-year Uint32-False] [GOOD] |91.6%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client6-year Utf8 NOT NULL-True] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-false-directory_scan-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] >> test_s3_0.py::TestS3::test_inference_parameters[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_each_row.json-json_each_row] [GOOD] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataset] [GOOD] >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataにちは% set] |91.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_1.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client3-year Uint32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-True-client0] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test 2025-05-05 
09:53:16,887 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 09:53:17,410 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 2891375 140M 143M 90.3M ydb-tests-olap-data_quotas --basetemp /home/runner/.ya/build/build_root/kpz1/000f8b/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 2894186 5.7G 5.7G 5.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/kpz1/000f8b/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk Test command err: upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False 
upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False upsert #99 ok, result: [] Quota exceeded False upsert #100 ok, result: [] Quota exceeded False upsert #101 ok, result: [] Quota exceeded False upsert #102 ok, result: [] Quota exceeded False upsert #103 ok, result: [] Quota exceeded False upsert #104 ok, result: [] Quota exceeded False upsert #105 ok, result: [] Quota exceeded False upsert #106 ok, result: [] Quota exceeded False upsert #107 ok, result: [] Quota exceeded False upsert #108 ok, result: [] Quota exceeded False upsert #109 ok, result: [] Quota exceeded False upsert #110 ok, result: [] Quota exceeded False upsert #111 ok, result: [] Quota exceeded False upsert #112 ok, result: [] Quota exceeded False upsert #113 ok, result: [] Quota exceeded False upsert #114 ok, result: [] Quota exceeded False upsert #115 ok, result: [] Quota exceeded False upsert #116 ok, result: [] Quota exceeded False upsert #117 ok, result: [] Quota exceeded False upsert #118 ok, result: [] Quota exceeded False upsert #119 ok, result: [] Quota exceeded False upsert #120 ok, result: [] Quota exceeded False upsert #121 ok, result: [] Quota exceeded False upsert #122 ok, result: [] Quota exceeded False upsert #123 ok, result: [] Quota exceeded False upsert #124 ok, result: [] Quota exceeded False upsert #125 ok, result: [] Quota exceeded False upsert #126 ok, result: [] Quota exceeded False upsert #127 ok, result: [] Quota exceeded False upsert #128 ok, result: [] Quota exceeded False upsert #129 ok, result: [] Quota exceeded False upsert #130 ok, result: [] Quota exceeded False upsert #131 ok, result: [] Quota exceeded False upsert #132 ok, result: [] Quota exceeded False upsert #133 ok, result: [] Quota exceeded False upsert #134 ok, result: [] Quota exceeded False upsert #135 ok, result: [] Quota exceeded 
False upsert #136 ok, result: [] Quota exceeded False upsert #137 ok, result: [] Quota exceeded False upsert #138 ok, result: [] Quota exceeded False upsert #139 ok, result: [] Quota exceeded False upsert #140 ok, result: [] Quota exceeded False upsert #141 ok, result: [] Quota exceeded False upsert #142 ok, result: [] Quota exceeded False upsert #143 ok, result: [] Quota exceeded False upsert #144 ok, result: [] Quota exceeded False upsert #145 ok, result: [] Quota exceeded False upsert #146 ok, result: [] Quota exceeded False upsert #147 ok, result: [] Quota exceeded False upsert #148 ok, result: [] Quota exceeded False upsert #149 ok, result: [] Quota exceeded False upsert #150 ok, result: [] Quota exceeded False upsert #151 ok, result: [] Quota exceeded False upsert #152 ok, result: [] Quota exceeded False upsert #153 ok, result: [] Quota exceeded False upsert #154 ok, result: [] Quota exceeded False upsert #155 ok, result: [] Quota exceeded False upsert #156 ok, result: [] Quota exceeded False upsert #157 ok, result: [] Quota exceeded False upsert #158 ok, result: [] Quota exceeded False upsert #159 ok, result: [] Quota exceeded False upsert #160 ok, result: [] Quota exceeded False upsert #161 ok, result: [] Quota exceeded False File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = 
call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 98, in test self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, 'huge', i, retries=0)) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 78, in upsert_until_overload res = do_upsert(i) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 98, in self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, 'huge', i, retries=0)) File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 56, in upsert_test_chunk return session.execute_with_retries(f""" File "contrib/python/ydb/py3/ydb/query/pool.py", line 204, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007f8f8e1ff640 (most recent call first): File 
"ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 193 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 252 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 533 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 547 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f8f8efff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f8fab981380 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 558 in stop File "ydb/tests/olap/data_quotas/test_quota_exhaustion.py", line 38 in teardown_method File "contrib/python/pytest/py3/_pytest/python.py", line 779 in _call_with_optional_argument File "contrib/python/pytest/py3/_pytest/python.py", line 910 in xunit_setup_method_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024 in finish File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701 in File "contrib/python/pytest/py3/_pytest/runner.py", line 526 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 108 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 306 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...s-olap-data_quotas', '--basetemp', '/home/runner/.ya/build/build_root/kpz1/000f8b/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/kpz1/000f8b/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/kpz1/000f8b', '--source-root', '/home/runner/.ya/build/build_root/kpz1/000f8b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/kpz1/000f8b/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', 
'--project-path', 'ydb/tests/olap/data_quotas', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/olap/data_quotas', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...s-olap-data_quotas', '--basetemp', '/home/runner/.ya/build/build_root/kpz1/000f8b/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/kpz1/000f8b/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/kpz1/000f8b', '--source-root', '/home/runner/.ya/build/build_root/kpz1/000f8b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/kpz1/000f8b/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/data_quotas', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--modulo', '10', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/olap/data_quotas', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) 2025-05-05 09:53:48,058 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-05-05 09:53:48,059 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.gz-gzip] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] >> test_formats.py::TestS3Formats::test_format_inference[v2-test_list.json-json_list] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] >> test_postgres.py::TestPGSQL::test_sql_suite[results-strings.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-exact_file-False-client0] [GOOD] 
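The timeout report above shows ydb/tests/olap/data_quotas/test_quota_exhaustion.py looping in upsert_until_overload (161 upserts, "Quota exceeded False" every time) until the 600-second wrapper kills the run during teardown. A hedged sketch of that loop shape with an explicit wall-clock budget, so a missing overload surfaces as a test failure rather than a wrapper timeout; do_upsert and is_overloaded stand in for the real helpers (upsert_test_chunk and the quota check) and are assumptions, not the actual YDB test code:

    import time

    class OverloadNotReached(Exception):
        """Quota was not exhausted within the allotted budget."""

    def upsert_until_overload(do_upsert, is_overloaded, budget_sec=480):
        # do_upsert(i) performs one chunk upsert and returns its result;
        # is_overloaded(result) decides whether the storage quota kicked in.
        # Both are hypothetical stand-ins for the helpers seen in the traceback.
        deadline = time.monotonic() + budget_sec
        i = 0
        while time.monotonic() < deadline:
            result = do_upsert(i)
            print(f"upsert #{i} ok, result: {result!r}")
            if is_overloaded(result):
                return i                     # overload reached as expected
            i += 1
        raise OverloadNotReached(f"no overload after {i} upserts in {budget_sec}s")

Keeping the budget below the runner's 600-second limit lets the failure land in the JUnit report as a readable exception instead of a killed process tree.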
>> TCheckpointStorageTest::ShouldRegisterCoordinator >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client7-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] >> test_format_setting.py::TestS3::test_interval_unit[v2-client0] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test [GOOD] >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [GOOD] >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc >> test_format_setting.py::TestS3::test_bad_format_setting[v1-client0] [GOOD] |91.7%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_format_setting.py::TestS3::test_bad_format_setting[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_format_inference[v2-test.parquet-parquet] [GOOD] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-True-client0] [GOOD] |91.7%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] >> test_formats.py::TestS3Formats::test_btc[v2] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] >> test_actorsystem.py::TestWithComputeNodeWith25Cpu::test >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.lz4-lz4] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client4-year Int64-False] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_insert.py::TestS3::test_insert[v2-client0-parquet-dataにちは% set] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataset] >> test_bindings_1.py::TestBindings::test_s3_format_mismatch[v1-client0] [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client8-year Int32-False] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith27Cpu::test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_10.sql-result_sets] [GOOD] >> test_push_down.py::TestS3PushDown::test_simple_case[v1-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] >> test_s3_1.py::TestS3::test_i18n_unpartitioned[v1-true-directory_scan-False-client0] [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-4.test] [GOOD] >> test_s3_0.py::TestS3::test_inference_timestamp[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.csv-csv_with_names] [GOOD] >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_formats.py::TestS3Formats::test_btc[v2] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] >> test_formats.py::TestS3Formats::test_btc[v1] >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> 
test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] >> test_bindings_1.py::TestBindings::test_pg_binding[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-False-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] |91.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client9-year Uint32-False] [GOOD] >> test_s3_0.py::TestS3::test_inference_projection[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataset] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.bz2-bzip2] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataにちは% set] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client6-year Uint64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] |91.7%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.json-json_each_row] [GOOD] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_actorsystem.py::TestWithComputeNodeWith25Cpu::test [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] >> test_bindings_1.py::TestBindings::test_pg_binding[v1-client0] [GOOD] >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_btc[v1] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_11.sql-result_sets] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] >> test_s3_0.py::TestS3::test_inference_null_column_name[v2-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-hive-True-client0] [GOOD] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client7-year Uint64 NOT NULL-False] [GOOD] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.zst-zstd] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] >> test_s3_0.py::TestS3::test_inference_unsupported_types[v2-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-false] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v1-date/simple/test.parquet-parquet] [GOOD] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith18Cpu::test [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_list-dataにちは% set] [GOOD] >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataset] >> test_actorsystem.py::TestWithHybridNodeWith27Cpu::test [GOOD] >> 
test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client11-year Int64-False] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [GOOD] >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client8-year String NOT NULL-True] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-False-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] >> test_formats.py::TestS3Formats::test_invalid_format[v2-client0] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-yql_syntax-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] >> test_compressions.py::TestS3Compressions::test_big_compression[v2-big.json.xz-xz] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params0-true] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataset] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataにちは% set] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_12.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v2-pg_syntax-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] >> test_s3_0.py::TestS3::test_json_list_formats[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client9-year String-False] [GOOD] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client12-year Uint64-False] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-false-projection-True-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.gz-gzip] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] >> 
test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_0.py::TestBindings::test_name_uniqueness_constraint[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd5/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd5/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3414098) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3417499 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_format[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.lz4-lz4] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-yql_syntax-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params1] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] >> test_insert.py::TestS3::test_insert[v1-client0-json_each_row-dataにちは% set] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-false] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataset] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] >> test_s3_0.py::TestS3::test_csv_with_hopping[v2-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-False-client0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client10-year Utf8-False] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-false-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.br-brotli] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_bindings_1.py::TestBindings::test_count_for_pg_binding[v1-pg_syntax-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client11-year Utf8 NOT NULL-True] [GOOD] >> test_format_setting.py::TestS3::test_date_simple[v2-date/simple/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] >> test_s3_0.py::TestS3::test_csv_with_hopping[v1-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v2-false-client0] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_13.sql-result_sets] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.bz2-bzip2] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_formats.py::TestS3Formats::test_invalid_input_compression[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataset] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params0-true] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataにちは% set] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client0-year Int32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] >> test_s3_0.py::TestS3::test_raw[v2-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-hive-True-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-plan] [GOOD] >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client12-year Date-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-false] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-csv_with_names-dataにちは% set] [GOOD] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.zst-zstd] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataset] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> 
test_dml.py::TestDML::test_dml[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] >> test_s3_0.py::TestS3::test_raw[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client1-year Uint32 NOT NULL-True] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |91.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-window.test] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_input_compression[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-False-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client13-year Date NOT NULL-True] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] >> test_compressions.py::TestS3Compressions::test_big_compression[v1-big.json.xz-xz] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-false-client0] [GOOD] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] >> test_bindings_1.py::TestBindings::test_ast_in_failed_query_compilation[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataset] [GOOD] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataにちは% set] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v2-client0] [GOOD] >> 
test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client2-year Uint64 NOT NULL-True] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params0-true] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] >> test_bindings_1.py::TestBindings::test_raw_empty_schema_binding[v1-client0] [GOOD] >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] >> test_s3_1.py::TestS3::test_i18n_partitioning[v2-true-projection-True-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v2-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_streaming_join.py::TestStreamingJoin::test_grace_join[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cae/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cae/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_streaming_join/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3491946) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3494642 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client14-year Datetime-False] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT NULL-True] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] >> test_s3_0.py::TestS3::test_raw[v1-true-client0] [GOOD] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert[v1-client0-parquet-dataにちは% set] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] >> test_actorsystem.py::TestWithHybridNodeWith28Cpu::test |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] >> test_explicit_partitioning_1.py::TestS3::test_projection_date_type_validation[v1-true-client15-year Datetime NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client3-year Date NOT NULL-False] [GOOD] >> test_bindings_1.py::TestBindings::test_binding_with_backslash_in_location[v1-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_push_down.py::TestS3PushDown::test_simple_case[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/kpz1/000c8b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c8b/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_push_down/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3535071) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3538031 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_formats.py::TestS3Formats::test_invalid_output_compression[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client0-column_type0-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression[v1-client0] [GOOD] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client1-column_type1-True] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client4-year String NOT NULL-True] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client2-column_type2-True] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-hive-True-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] >> 
test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] >> TYdbControlPlaneStorageListConnections::ShouldSuccess >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client3-column_type3-False] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v2-client0] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test [GOOD] >> test_insert.py::TestS3::test_big_json_list_insert[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client4-column_type4-True] [GOOD] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] >> test_format_setting.py::TestS3::test_date_simple_insert[v1-date/simple/test.parquet-parquet] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params0-true] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client5-column_type5-True] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-False-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] >> TYdbControlPlaneStorageListConnections::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldPageToken ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_formats.py::TestS3Formats::test_custom_csv_delimiter_format[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client6-column_type6-True] [GOOD] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-5.test] [GOOD] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client5-year String-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v2[v2-client0] [GOOD] >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client6-year Utf8 NOT NULL-True] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-false-projection-True-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] >> test_insert.py::TestS3::test_big_json_list_insert[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client10-column_type10-False] [GOOD] >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.csv-csv_with_names] [GOOD] >> test_actorsystem.py::TestWithHybridNodeWith28Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client11-column_type11-False] [GOOD] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] >> 
test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v2-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client7-year Utf8-False] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-False-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client0-column_type0-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params0-true] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client1-column_type1-True] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join1.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] >> TYdbControlPlaneStorageListConnections::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client2-column_type2-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] >> test_formats.py::TestS3Formats::test_no_not_nullable_column[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client3-column_type3-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] >> test_insert.py::TestS3::test_insert_csv_delimiter[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] >> 
test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client4-column_type4-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client5-column_type5-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/py3test >> test_sql.py::TestCanonicalFolder1::test_case[pure/trivial_2.sql-result_sets] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-false] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-hive-True-client0] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client6-column_type6-True] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] >> test_formats.py::TestS3Formats::test_no_nullable_column[v2-client0] [GOOD] |92.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithComputeNodeWith26Cpu::test [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-13.test] [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client9-column_type9-False] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] >> test_insert.py::TestS3::test_insert_csv_delimiter[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client10-column_type10-False] [GOOD] >> test_insert.py::TestS3::test_append[v2-client0] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-5.test] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client11-column_type11-False] [GOOD] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.2%| [TA] $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params0-true] [GOOD] >> test_formats.py::TestS3Formats::test_no_nullable_column[v1-client0] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-False-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client9-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_integer_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] |92.2%| [TA] {RESULT} $(B)/ydb/tests/functional/canonical/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client0-column_type0-False] [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client1-column_type1-False] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [FAIL] |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client2-column_type2-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldValidate >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v2-client0] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client3-column_type3-False] [GOOD] >> test_s3_1.py::TestS3::test_i18n_partitioning[v1-true-projection-True-client0] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] >> test_format_setting.py::TestS3::test_date_simple_insert[v2-date/simple/test.parquet-parquet] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |92.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client4-column_type4-False] [GOOD] >> 
test_insert.py::TestS3::test_append[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_insert.py::TestS3::test_append[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] >> TYdbControlPlaneStorageListConnections::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client5-column_type5-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client6-column_type6-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client7-column_type7-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client11-year Int64-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client8-column_type8-False] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_type_in_csv[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client9-column_type9-False] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client9-column_type9-False] [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client10-column_type10-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client11-column_type11-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client11-column_type11-False] [GOOD] >> 
test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v2-false-client0] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client12-column_type12-False] >> test_s3_0.py::TestS3::test_limit[v2-false-kikimr_params0-client0] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_compressions.py::TestS3Compressions::test_invalid_compression_inference[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cb4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cb4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_compressions/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3486168) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3491008 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client12-column_type12-False] [GOOD] >> test_insert.py::TestS3::test_append[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client13-column_type13-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_insert.py::TestS3::test_part_split[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v2-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client0-column_type0-False] [GOOD] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v2-client0] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] >> test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client1-column_type1-False] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate >> test_validation.py::TestS3::test_empty[v1-client0] [GOOD] >> test_validation.py::TestS3::test_nested_issues[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client2-column_type2-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client3-column_type3-False] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join2.test] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v2-true-client13-year Date-False] [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] >> test_insert.py::TestS3::test_part_split[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_insert.py::TestS3::test_part_split[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client4-column_type4-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client5-column_type5-True] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v2-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client6-column_type6-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] >> 
test_formats.py::TestS3Formats::test_invalid_column_in_parquet[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client0-year Int32 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client7-column_type7-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] >> test_insert.py::TestS3::test_part_split[v1-client0] [GOOD] >> test_insert.py::TestS3::test_part_merge[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v1-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client9-column_type9-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d8b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d8b/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3146448 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_validation.py::TestS3::test_nested_issues[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] >> test_validation.py::TestS3::test_nested_issues[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client1-year Uint32 NOT NULL-True] [GOOD] >> test_s3_0.py::TestS3::test_limit[v2-true-kikimr_params0-client0] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT 
NULL-True] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] >> test_formats.py::TestS3Formats::test_simple_pg_types[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client11-column_type11-False] [GOOD] |92.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_bindings_1.py::TestBindings::test_decimal_binding[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000caa/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000caa/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_bindings_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3499749) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3503538 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client2-year Uint64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_enum_type_validation[v1-client13-column_type13-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client0-column_type0-False] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client0-column_type0-False] [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client1-column_type1-True] >> test_formats.py::TestS3Formats::test_simple_pg_types[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client1-column_type1-True] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v1-false-client0] [GOOD] >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] >> test_validation.py::TestS3::test_nested_issues[v1-client0] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> 
test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client2-column_type2-False] >> test_validation.py::TestS3::test_nested_type[v2-client0] >> test_insert.py::TestS3::test_part_merge[v2-client0] [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_insert.py::TestS3::test_part_merge[v1-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client2-column_type2-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client3-column_type3-True] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client3-column_type3-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client4-column_type4-True] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v2-client0] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client4-column_type4-True] [GOOD] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client5-column_type5-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client4-year String NOT NULL-True] [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client5-column_type5-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client6-column_type6-True] 
>> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-false] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithHybridNodeWith29Cpu::test [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client6-column_type6-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client7-column_type7-True] >> test_s3_0.py::TestS3::test_limit[v1-false-kikimr_params0-client0] [GOOD] |92.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client7-column_type7-True] [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client8-column_type8-False] >> test_formats.py::TestS3Formats::test_precompute[v1-client0] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client8-column_type8-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client9-column_type9-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client5-year String-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params1-true] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client10-column_type10-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_format_setting.py::TestS3::test_timestamp_simple_iso[v2-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client10-column_type10-False] [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit >> 
test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client11-column_type11-False] >> test_s3_1.py::TestS3::test_huge_source[v1-true-client0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client11-column_type11-False] [GOOD] >> test_insert.py::TestS3::test_part_merge[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-false] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client12-column_type12-False] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_list] [SKIPPED] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client13-column_type13-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client6-year Utf8 NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client13-column_type13-False] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client14-column_type14-False] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params1-true] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client14-column_type14-False] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v2-false-client0] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client15-column_type15-False] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_s3_0.py::TestS3::test_limit[v1-true-kikimr_params0-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v2-client15-column_type15-False] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client0-column_type0-False] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client7-year Utf8-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client0-column_type0-False] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client1-column_type1-True] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client1-column_type1-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client2-column_type2-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client2-column_type2-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client3-column_type3-True] >> test_insert.py::TestS3::test_part_binding[v2-client0-json_each_row] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-false] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client3-column_type3-True] [GOOD] >> test_formats.py::TestS3Formats::test_raw_empty_schema_query[v1-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client4-column_type4-True] >> test_s3_1.py::TestS3::test_top_level_listing[v2-true-client0] [GOOD] >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] 
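The DeprecationWarning reported just above by contrib/python/ydb/py3/ydb/types.py:59 recurs across several of the datashard/dml suites in this run. A minimal sketch of the migration the warning itself suggests (illustrative only, not the actual ydb SDK code; the timestamp value is an arbitrary example, and `timezone.utc` is used as the portable spelling of `datetime.UTC`, which requires Python 3.11+):

```python
from datetime import datetime, timezone

ts = 1714896000  # arbitrary example Unix timestamp (assumption, for illustration)

# Deprecated: produces a naive datetime that is implicitly UTC.
naive_utc = datetime.utcfromtimestamp(ts)

# Replacement suggested by the warning: a timezone-aware datetime in UTC.
# datetime.UTC (Python 3.11+) is an alias for timezone.utc.
aware_utc = datetime.fromtimestamp(ts, timezone.utc)

assert aware_utc.tzinfo is not None
print(naive_utc, aware_utc.isoformat())
```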
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client4-column_type4-True] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client5-column_type5-False] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client5-column_type5-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client6-column_type6-True] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client6-column_type6-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client8-year Int32-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client7-column_type7-True] >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client7-column_type7-True] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client8-column_type8-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params1-true] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client8-column_type8-False] [GOOD] >> 
test_s3_1.py::TestS3::test_top_level_listing[v1-false-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] >> test_insert.py::TestS3::test_part_binding[v2-client0-csv_with_names] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client9-column_type9-False] >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_list] [SKIPPED] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client9-column_type9-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client10-column_type10-False] |92.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client10-column_type10-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client11-column_type11-False] >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client11-column_type11-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client12-column_type12-False] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client12-column_type12-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client13-column_type13-False] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client9-year Uint32-False] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client13-column_type13-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client14-column_type14-False] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v1-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client14-column_type14-False] [GOOD] >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client15-column_type15-False] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client10-year Int64 NOT NULL-True] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
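A second recurring warning in the sqs/with_quotas suites above is `DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead`, raised at ydb/tests/library/sqs/requests_client.py:140. A minimal sketch of the rename (a hypothetical call site with made-up values, not the actual requests_client.py code):

```python
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("sqs.requests_client")

code, reason, text = 429, "ThrottlingException", "Too Many Requests"  # made-up illustrative values

# Deprecated alias that triggers the warning seen in the log.
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

# Non-deprecated spelling; lazy %-style arguments also avoid building the
# message string when the WARNING level is filtered out.
logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)
```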
>> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_binding_projection_date_type_validation[v1-client15-column_type15-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-false-client0] >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_validation.py::TestS3::test_nested_type[v2-client0] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_validation.py::TestS3::test_nested_type[v1-client0] >> test_insert.py::TestS3::test_part_binding[v1-client0-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.csv-csv_with_names] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-false] [GOOD] >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-false-client0] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-true-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client11-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_directory_v1[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v2-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params1-true] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> 
test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] >> test_ydb_over_fq.py::TestYdbOverFq::test_list_without_streams[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_insert.py::TestS3::test_part_binding[v1-client0-csv_with_names] [GOOD] |92.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167140 9624 ? Ss 05:35 0:04 /sbin/init root 2 0.0 0.0 0 0 ? S 05:35 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 05:35 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 05:35 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 05:35 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/0:0H-events_highpri] root 9 1.1 0.0 0 0 ? I 05:35 2:54 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 05:35 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 05:35 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/0] root 15 0.1 0.0 0 0 ? I 05:35 0:23 [rcu_sched] root 16 0.0 0.0 0 0 ? S 05:35 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/0] root 19 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 05:35 0:00 [migration/1] root 23 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 05:35 0:03 [migration/2] root 29 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 05:35 0:03 [migration/3] root 35 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 05:35 0:03 [migration/4] root 41 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 05:35 0:03 [migration/5] root 47 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 05:35 0:03 [migration/6] root 53 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/6] root 55 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 05:35 0:03 [migration/7] root 59 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/7] root 61 0.0 0.0 0 0 ? 
I< 05:35 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 05:35 0:03 [migration/8] root 65 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/8:0H-events_highpri] root 68 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 05:35 0:03 [migration/9] root 71 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/9] root 73 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 05:35 0:03 [migration/10] root 77 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 05:35 0:03 [migration/11] root 83 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/11] root 85 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 05:35 0:03 [migration/12] root 89 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/12] root 91 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 05:35 0:03 [migration/13] root 95 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/13] root 97 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 05:35 0:03 [migration/14] root 101 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 05:35 0:03 [migration/15] root 107 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 05:35 0:03 [migration/16] root 113 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/16] root 115 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 05:35 0:03 [migration/17] root 119 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 05:35 0:03 [migration/18] root 125 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/18:0H-kblockd] root 128 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 05:35 0:03 [migration/19] root 131 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/19:0H-kblockd] root 134 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 05:35 0:03 [migration/20] root 137 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/20] root 139 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? 
S 05:35 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 05:35 0:03 [migration/21] root 143 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/21] root 144 0.0 0.0 0 0 ? I 05:35 0:01 [kworker/21:0-rcu_gp] root 145 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 05:35 0:03 [migration/22] root 149 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 05:35 0:03 [migration/23] root 155 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/24] root 159 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/24] root 160 0.0 0.0 0 0 ? S 05:35 0:03 [migration/24] root 161 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/24] root 163 0.0 0.0 0 0 ? I< 05:35 0:00 [kworker/24:0H-events_highpri] root 164 0.0 0.0 0 0 ? S 05:35 0:00 [cpuhp/25] root 165 0.0 0.0 0 0 ? S 05:35 0:00 [idle_inject/25] root 166 0.0 0.0 0 0 ? S 05:35 0:03 [migration/25] root 167 0.0 0.0 0 0 ? S 05:35 0:01 [ksoftirqd/25] root 169 0.0 ... . Create session OK 2025-05-05T09:54:46.915018Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:54:46.915021Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:54:46.915313Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes". Create session OK 2025-05-05T09:54:46.915327Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:54:46.915328Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:54:46.915461Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-05-05T09:54:46.915474Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:54:46.915476Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:54:46.915559Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs". 
Create session OK 2025-05-05T09:54:46.915570Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:54:46.915572Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:54:46.915607Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-05-05T09:54:46.915617Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:54:46.915618Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 2025-05-05T09:54:46.915697Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets". Create session OK 2025-05-05T09:54:46.915706Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:54:46.915707Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:54:46.915716Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants". 
Create session OK 2025-05-05T09:54:46.915718Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:54:46.915720Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:54:46.934662Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)" 2025-05-05T09:54:46.934679Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)": 2025-05-05T09:54:46.976932Z node 17 :YQ_RATE_LIMITER DEBUG: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-05-05T09:54:46.976964Z node 17 :YQ_RATE_LIMITER DEBUG: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-05-05T09:54:46.995248Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas" 2025-05-05T09:54:46.995263Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/quotas": 2025-05-05T09:54:46.999231Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes" 2025-05-05T09:54:46.999249Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/nodes": 2025-05-05T09:54:46.999558Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs" 2025-05-05T09:54:46.999567Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/jobs": 2025-05-05T09:54:46.999685Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries" 2025-05-05T09:54:46.999695Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/queries": 2025-05-05T09:54:47.000510Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections" 2025-05-05T09:54:47.000527Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/connections": 2025-05-05T09:54:47.000568Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-05-05T09:54:47.000574Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-05-05T09:54:47.000740Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-05-05T09:54:47.000743Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-05-05T09:54:47.000764Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants" 2025-05-05T09:54:47.000768Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenants": 2025-05-05T09:54:47.000819Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-05-05T09:54:47.000821Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-05-05T09:54:47.001427Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-05-05T09:54:47.001443Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-05-05T09:54:47.001805Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings" 2025-05-05T09:54:47.001825Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/bindings": 2025-05-05T09:54:47.002038Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings" 
2025-05-05T09:54:47.002049Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/mappings": 2025-05-05T09:54:47.002053Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-05-05T09:54:47.002057Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageListQueries::TTestCaseShouldCombineFilters::Execute_(NUnitTest::TTestContext&)/compute_databases": >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v2-true-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-false] [GOOD] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> test_format_setting.py::TestS3::test_timestamp_simple_iso_insert[v2-timestamp/simple_iso/test.parquet-parquet] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-false-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-true-client0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-false-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start |92.7%| [TA] $(B)/ydb/tests/fq/control_plane_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_insert.py::TestS3::test_error[v1-client0-json_each_row] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params1-true] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] >> test_explicit_partitioning_1.py::TestS3::test_raw_format[v1-true-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client0-year Int32 NOT NULL-True] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-false-client0] [GOOD] >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test |92.7%| [TA] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_formats.py::TestS3Formats::test_with_infer_and_unsupported_option[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cb1/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cb1/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_formats/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3489873) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3493331 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client1-year Uint32 NOT NULL-True] [GOOD] >> test_select_1.py::TestSelect1::test_compile_error[v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.json-json_each_row] [GOOD] |92.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v2-client0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-false] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v2-true-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] >> 
test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client2-year Uint64 NOT NULL-True] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] >> test_insert.py::TestS3::test_error[v1-client0-csv_with_names] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-false-client0] [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] >> test_validation.py::TestS3::test_nested_type[v1-client0] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_all_types-pk_types12-all_types12-index12---] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params1-true] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] >> test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query[v1-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_format[v1-false-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-tsv_with_names] [GOOD] >> test_insert.py::TestS3::test_error[v1-client0-parquet] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration >> test_insert.py::TestS3::test_error[v1-client0-parquet] [SKIPPED] >> test_insert.py::TestS3::test_insert_empty_object[v2] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client4-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v2-true-client0] [GOOD] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-false-client0] >> test_s3_0.py::TestS3::test_bad_format[v1-true-client0] [GOOD] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
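The DeprecationWarning printed by contrib/python/ydb/py3/ydb/types.py:59 in the test command output above already names its replacement: build timezone-aware datetimes with datetime.datetime.fromtimestamp(timestamp, datetime.UTC) instead of datetime.datetime.utcfromtimestamp(). A minimal sketch of that migration, assuming Python 3.11+ for the datetime.UTC alias (the from_ts helper below is illustrative only, not code from ydb/py3):

    import datetime

    def from_ts(timestamp: float) -> datetime.datetime:
        # Deprecated, returns a naive datetime and is scheduled for removal:
        #   datetime.datetime.utcfromtimestamp(timestamp)
        # Recommended replacement: an aware datetime pinned to UTC.
        return datetime.datetime.fromtimestamp(timestamp, datetime.UTC)

    print(from_ts(0))  # 1970-01-01 00:00:00+00:00

On Python versions before 3.11, datetime.timezone.utc can stand in for datetime.UTC.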
>> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v2-client0] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Results] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] |92.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_insert.py::TestS3::test_insert_empty_object[v2] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_insert.py::TestS3::test_insert_empty_object[v1] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] |92.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client5-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_1.py::TestS3::test_top_level_listing[v1-true-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3376791) is multi-threaded, use of fork() may lead to deadlocks in the child. 
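The multiprocessing/popen_fork.py:66 DeprecationWarning just above ("use of fork() may lead to deadlocks in the child") is CPython's generic warning about fork()ing a multi-threaded parent. One common remedy in plain Python, shown here only as an illustrative sketch and not as a change to these test harnesses, is to select the spawn start method before creating worker processes:

    import multiprocessing

    def worker(n: int) -> int:
        return n * n

    if __name__ == "__main__":
        # "spawn" launches a fresh interpreter instead of fork()ing the
        # (possibly multi-threaded) parent, which is what the warning flags.
        multiprocessing.set_start_method("spawn", force=True)
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(worker, range(4)))  # [0, 1, 4, 9]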
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_write_result.v1-kikimr_params0-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3381260 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_top_level_listing_2.v2-kikimr_params0-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_precompute.v2-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_1/testing_out_stuff/test_s3_1.py.TestS3.test_precompute.v2-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback |92.9%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client6-year Utf8 NOT NULL-True] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test.py::test[key_filter-string_with-default.txt-Results] >> test_s3_0.py::TestS3::test_bad_request_on_invalid_parquet[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] >> test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] >> test_ydb_over_fq.py::TestYdbOverFq::test_stream_execute_scan_query[v1-client0] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] >> test.py::test[insert-double_append_to_anonymous--Results] [GOOD] >> test.py::test[insert-replace_inferred--ForceBlocks] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-false-client0] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-true-client0] >> test_insert.py::TestS3::test_insert_empty_object[v1] [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-9.test] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client7-year Utf8-False] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-with_checkpoints] [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> 
test_s3_0.py::TestS3::test_bad_request_on_compression[v2-client0] [GOOD] |92.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_inflight.py::TestS3::test_inflight[v1-client0-kikimr_params2] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cdc/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cdc/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_inflight/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3393516) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3397569 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-with_checkpoints-mvp_external_ydb_endpoint0] [GOOD] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test.py::test[join-mapjoin_with_empty_read--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_read--Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary--ForceBlocks] >> test.py::test[order_by-order_with_null-default.txt-ForceBlocks] >> 
test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-19420--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client8-year Int32-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] >> test_insert.py::TestS3::test_insert_without_format_error[v2-client0] [GOOD] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/py3test >> test_dump_restore.py::TestDumpRestore::test_dump_restore[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] [GOOD] >> test.py::test[action-eval_sample--Results] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test.py::test[insert-replace_inferred--ForceBlocks] [GOOD] >> test.py::test[insert-replace_inferred--Results] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] >> test_select_1.py::TestSelect1::test_compile_error[v1] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-analytics] [GOOD] |93.0%| [TA] $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... results_accumulator.log} |93.0%| [TA] {RESULT} $(B)/ydb/tests/datashard/dump_restore/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] >> test.py::test[join-equi_join_three_asterisk--Results] >> test_explicit_partitioning_1.py::TestS3::test_parquet[v1-true-client0] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--ForceBlocks] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v2-true-client0] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-create-streaming] [GOOD] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-false] [GOOD] >> test.py::test[join-yql-4275-off-ForceBlocks] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_results[v1-client0] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client9-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] >> test.py::test[join-mergejoin_big_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary--Results] |93.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test.py::test[order_by-order_with_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Results] >> test.py::test[insert_monotonic-non_existing_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-non_existing_fail--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
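The unclosed-file ResourceWarnings repeated in the test command output above (for example around ydb/tests/tools/fq_runner/kikimr_runner.py:184, where a pattern like sum(1 for _ in open(bill_fname)) leaves the handle to the garbage collector) come with their own hint: "Enable tracemalloc to get the object allocation traceback". A small generic sketch of both remedies, using a placeholder file name and not presented as a patch to kikimr_runner.py:

    # Run with `python -X tracemalloc=5 ...` (or PYTHONTRACEMALLOC=5) so each
    # ResourceWarning is reported together with the allocation traceback.

    def count_lines(path: str) -> int:
        # A context manager closes the file deterministically, which avoids
        # the "unclosed file <_io.TextIOWrapper ...>" warning entirely.
        with open(path, encoding="utf-8") as f:
            return sum(1 for _ in f)

    print(count_lines("metering.bill"))  # placeholder path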
>> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v2-true-client0] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Results] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client10-year Int64 NOT NULL-True] [GOOD] >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v1-true-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] >> test.py::test[action-eval_sample--Results] [GOOD] >> test.py::test[action-eval_values_output_table_subquery--Results] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-5-kikimr_params2-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] >> test.py::test[key_filter-yql-19420--Results] [GOOD] >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Results] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Results] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unused_columns-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-field_subset_for_multiusage--Results] >> test.py::test[action-eval_input_output_table_subquery--ForceBlocks] >> test_select_1.py::TestSelect1::test_compile_error[v2] >> test.py::test[join-mergejoin_big_primary--Results] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-ForceBlocks] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Results] [GOOD] >> test.py::test[order_by-sort--ForceBlocks] >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v1-true-client0] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-ForceBlocks] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-ForceBlocks] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] >> test.py::test[aggregate-percentiles_ungrouped--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client11-year Int64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] >> test.py::test[join-yql-4275-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-4275-off-Results] [SKIPPED] >> test.py::test[json-jsondocument/select--ForceBlocks] >> 
test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_size_limit.py::TestS3::test_size_limit[v1-client0-100-kikimr_params2-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] >> test.py::test[join-equi_join_three_asterisk--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Results] >> test.py::test[action-eval_values_output_table_subquery--Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [FAIL] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-false] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] >> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] >> test.py::test[blocks-pg_to_interval--Results] >> test.py::test[pg-join_using_tables2-default.txt-Results] [GOOD] >> test.py::test[pg-select_table1-default.txt-Results] >> test.py::test[action-eval_input_output_table_subquery--ForceBlocks] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client12-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test.py::test[join-equi_join_three_asterisk--Results] [GOOD] >> test.py::test[join-flatten_columns1-off-Results] [SKIPPED] >> test.py::test[join-inner_grouped--Results] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |93.1%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> test_size_limit.py::TestS3::test_size_limit[v1-client0-500-kikimr_params2-true] [GOOD] >> test.py::test[order_by-sort--ForceBlocks] [GOOD] >> test.py::test[order_by-sort--Results] >> test.py::test[schema-select_all_inferschema-extra_field-ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] |93.1%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... 
results_accumulator.log} |93.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test.py::test[json-jsondocument/select--ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/select--Results] >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[action-eval_like--ForceBlocks] >> test.py::test[join-mergejoin_force_align1-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] >> test.py::test[join-pullup_left--ForceBlocks] >> test_format_setting.py::TestS3::test_timestamp_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge2-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] >> test.py::test[aggregate-percentiles_ungrouped--Results] [GOOD] >> test.py::test[aggregate-subquery_aggregation--ForceBlocks] >> test.py::test[distinct-distinct_union_all-default.txt-Results] [GOOD] >> test.py::test[dq-wrong_script--Results] [SKIPPED] >> test.py::test[expr-as_table_emptylist--Results] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] |93.1%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> test_explicit_partitioning_0.py::TestS3::test_projection_integer_type_validation[v1-true-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] >> test.py::test[order_by-sort--Results] [GOOD] >> test.py::test[order_by-union_all--ForceBlocks] >> test.py::test[join-equi_join_three_asterisk--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-ForceBlocks] >> test.py::test[schema-select_all_inferschema-extra_field-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--ForceBlocks] >> test.py::test[optimizers-field_subset_for_multiusage--Results] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] |93.2%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[json-jsondocument/select--Results] [GOOD] >> test.py::test[key_filter-mixed_opt_bounds--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-mixed_opt_bounds--Results] >> test.py::test[key_filter-mixed_opt_bounds--Results] [SKIPPED] >> test.py::test[key_filter-nile_pred--ForceBlocks] >> test.py::test[insert-append_with_read_udf_fail--Results] >> test.py::test[blocks-pg_to_interval--Results] [GOOD] >> test.py::test[blocks-sort_one_asc--Results] >> test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q01-default.txt-Results] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] >> test_select_1.py::TestSelect1::test_compile_error[v2] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] >> test.py::test[insert-append_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-drop_sortness--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] >> test.py::test[action-subquery_merge2-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client0-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] >> test.py::test[expr-as_table_emptylist--Results] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan--Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-false] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--ForceBlocks] >> test.py::test[action-eval_like--ForceBlocks] [GOOD] >> test.py::test[action-eval_like--Results] >> test.py::test[join-pullup_left--ForceBlocks] [GOOD] >> test.py::test[join-pullup_left--Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] >> test.py::test[type_v3-append_struct-default.txt-Results] >> test.py::test[pg-tpcds-q01-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] [SKIPPED] >> test.py::test[aggregate-subquery_aggregation--ForceBlocks] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[join-equi_join_three_asterisk-off-ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren--ForceBlocks] >> test.py::test[join-equi_join_three_asterisk-off-Results] [SKIPPED] >> test.py::test[join-inner_all-off-ForceBlocks] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test.py::test[schema-select_all_inferschema_range--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] >> test.py::test[join-inner_grouped--Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr--Results] >> 
test.py::test[key_filter-nile_pred--ForceBlocks] [GOOD] >> test.py::test[key_filter-nile_pred--Results] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] [GOOD] >> test.py::test[expr-non_persistable_order_by_fail--Results] >> test.py::test[action-eval_like--Results] [GOOD] >> test.py::test[action-nested_subquery--ForceBlocks] >> test.py::test[blocks-sort_one_asc--Results] [GOOD] >> test.py::test[blocks-string_pass--Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client1-year Uint32 NOT NULL-False] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-5-kikimr_params2-true] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] >> test.py::test[order_by-union_all--ForceBlocks] [GOOD] >> test.py::test[order_by-union_all--Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] [GOOD] >> test.py::test[optimizers-test_lmap_opts--Results] [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] >> test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] [GOOD] >> test.py::test[schema-select_field-read_schema-ForceBlocks] >> test.py::test[binding-table_from_binding-default.txt-ForceBlocks] >> test.py::test[key_filter-nile_pred--Results] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter2--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter2--Results] [SKIPPED] >> test.py::test[key_filter-uuid--ForceBlocks] >> test_watermarks.py::TestWatermarks::test_pq_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_query [SKIPPED] >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions >> test.py::test[join-pullup_left--Results] [GOOD] >> test.py::test[join-pullup_left-off-ForceBlocks] >> test.py::test[insert-drop_sortness--ForceBlocks] [GOOD] >> test.py::test[insert-drop_sortness--Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-false] [GOOD] >> test.py::test[expr-non_persistable_order_by_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_int--Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] Test command err: 
ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client2-year Uint64 NOT NULL-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] >> test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[insert-drop_sortness--Results] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] >> test.py::test[order_by-yql-19598--ForceBlocks] >> test.py::test[join-three_equalities_paren--ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren--Results] >> test.py::test[join-inner_all-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_all-off-Results] [SKIPPED] >> test.py::test[join-join_table_conflict_fail--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [FAIL] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d82/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d82/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3162479 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_stop.py::TestStop::test_stop_query[v1-streaming] >> test.py::test[type_v3-append_struct-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-100-kikimr_params2-true] [GOOD] >> test.py::test[action-nested_subquery--ForceBlocks] [GOOD] >> test.py::test[action-nested_subquery--Results] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] >> test.py::test[type_v3-insert_struct_v3_with_native--ForceBlocks] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] >> test_recovery.py::TestRecovery::test_delete >> test.py::test[blocks-string_pass--Results] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-min-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v1-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test.py::test[schema-select_field-read_schema-ForceBlocks] [GOOD] >> 
test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] >> test.py::test[join-join_table_conflict_fail--ForceBlocks] [GOOD] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--ForceBlocks] >> test.py::test[schema-select_field-read_schema-Results] >> test.py::test[binding-table_from_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Results] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Results] [SKIPPED] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_int--Results] [GOOD] >> test.py::test[file-where_key_in_file_content--Results] >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] >> test.py::test[pg-tpcds-q28-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> test.py::test[type_v3-json--ForceBlocks] >> test.py::test[aggr_factory-hll-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client3-year Date NOT NULL-False] [GOOD] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-false] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] >> test.py::test[join-pullup_left-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_left-off-Results] [SKIPPED] >> test.py::test[join-pullup_renaming--ForceBlocks] >> test.py::test[schema-select_field-read_schema-Results] [GOOD] >> test.py::test[schema-user_schema_append--ForceBlocks] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref--Results] >> test.py::test[binding-table_from_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int32--ForceBlocks] >> test.py::test[join-inner_grouped_by_expr--Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> test.py::test[order_by-yql-19598--ForceBlocks] [GOOD] >> test.py::test[order_by-yql-19598--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_schema_metadata[v1-mvp_external_ydb_endpoint0-without_checkpoints] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3586419) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:01] send response localhost:11125/?database=local ::1 - - [05/May/2025 09:55:01] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:16] send response localhost:11125/?database=local ::1 - - [05/May/2025 09:55:16] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Results] |93.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_ydb_over_fq.py::TestYdbOverFq::test_execute_data_query_error[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test.py::test[schema-user_schema_append--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_append--Results] [GOOD] >> test.py::test[select-from_in_front-default.txt-ForceBlocks] >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] [GOOD] >> test.py::test[order_by-yql-19598--Results] [GOOD] >> test.py::test[pg-pg_types_orderby--ForceBlocks] >> test.py::test[insert_monotonic-several1-default.txt-Results] [GOOD] >> test.py::test[join-alias_where_group-off-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client4-year Utf8 NOT NULL-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_pq_read_write.py::TestPqReadWrite::test_pq_read_write[v1-without_checkpoints-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3587215) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:02] send response localhost:22610/?database=local ::1 - - [05/May/2025 09:55:02] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:17] send response localhost:22610/?database=local ::1 - - [05/May/2025 09:55:17] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] |93.2%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[pg-tpcds-q37-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-Results] >> test_yq_streaming.py::TestYqStreaming::test_match_recognize_sink[v1] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] |93.2%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[file-where_key_in_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs--Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> test.py::test[key_filter-uuid--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-min-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Results] >> test.py::test[key_filter-uuid--Results] >> test.py::test[join-join_without_correlation_and_struct_access--Results] >> test.py::test[type_v3-json--ForceBlocks] [GOOD] >> test.py::test[type_v3-json--Results] >> test.py::test[blocks-add_int32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int32--Results] >> test.py::test[join-pullup_renaming--ForceBlocks] [GOOD] >> test.py::test[join-pullup_renaming--Results] >> test.py::test[join-lookupjoin_inner_empty_subq--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o--Results] >> test_recovery.py::TestRecovery::test_delete [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--Results] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v2-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[lineage-some_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] >> test.py::test[blocks-add_int32--Results] [GOOD] >> test.py::test[blocks-bitcast_scalar--ForceBlocks] >> test.py::test[type_v3-json--Results] [GOOD] >> test.py::test[type_v3-mixed_with_columns--ForceBlocks] >> test.py::test[select-from_in_front-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-from_in_front-default.txt-Results] >> test.py::test[pg-tpcds-q42-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_insert.py::TestS3::test_insert_without_format_error[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper 
name='/home/runner/.ya/build/build_root/kpz1/000ca4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3508203) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/test_insert.py.TestS3.test_insert.v2-client0-json_list-dataset/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca4/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_insert/testing_out_stuff/test_insert.py.TestS3.test_insert.v2-client0-json_list-dataset/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3512729 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_yds_bindings.py::TestBindings::test_yds_insert[v1] [SKIPPED] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] >> test.py::test[join-join_without_correlation_and_struct_access--Results] [GOOD] >> test.py::test[join-left_trivial--ForceBlocks] >> test.py::test[aggr_factory-min-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_compile_error[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3585957) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[join-alias_where_group-off-ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group-off-Results] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-off-ForceBlocks] >> test.py::test[case-case_size_eq_cast-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce--Results] >> test.py::test[key_filter-uuid--Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-ForceBlocks] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[produce-reduce_with_python--Results] [SKIPPED] >> test.py::test[ql_filter-integer_single--Results] >> test.py::test[join-pullup_renaming--Results] [GOOD] >> test.py::test[join-pullup_renaming-off-ForceBlocks] >> test.py::test[udf-named_args_for_script_with_posargs--Results] [GOOD] >> test.py::test[union_all-infer_3-default.txt-Results] >> test_ydb_over_fq.py::TestYdbOverFq::test_explain_data_query[v1-client0] [GOOD] >> test.py::test[select-from_in_front-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_semi--ForceBlocks] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] >> test.py::test[select-match_clause--ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3592640) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:18] send response localhost:19652/?database=local ::1 - - [05/May/2025 09:55:18] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test.py::test[pg-tpcds-q96-default.txt-ForceBlocks] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v2-client0] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [GOOD] >> test.py::test[blocks-bitcast_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-bitcast_scalar--Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso[v2-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] >> test_ydb_over_fq.py::TestYdbOverFq::test_describe_table[v1-client0] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[pg-tpcds-q46-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_explicit_partitioning_1.py::TestS3::test_valid_projected_column_values[v1-true-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce7/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ce7/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3373230) is multi-threaded, use of fork() may lead to deadlocks in the child. 
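Each ResourceWarning above is followed by the hint "Enable tracemalloc to get the object allocation traceback". Assuming the failing py3test can be rerun locally, acting on that hint makes the warning print where the leaked object was allocated; the snippet shows the interpreter flag and its programmatic equivalent.

    # Shell: PYTHONTRACEMALLOC=25 <test command>   or   python -X tracemalloc=25 ...
    # Programmatic equivalent (e.g. at the top of a conftest.py):
    import tracemalloc

    tracemalloc.start(25)  # keep up to 25 frames, enough to see the open() call site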
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3377503 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[join-lookupjoin_semi_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-Results] >> test.py::test[optimizers-sort_constraint_in_left--ForceBlocks] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] >> test.py::test[type_v3-mixed_with_columns--ForceBlocks] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] >> test.py::test[join-lookupjoin_with_cache-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_left_null_column--Results] >> test.py::test[pg-pg_types_orderby--ForceBlocks] [GOOD] >> test.py::test[pg-pg_types_orderby--Results] [SKIPPED] >> test.py::test[pg-tpcds-q26-default.txt-ForceBlocks] >> test.py::test[coalesce-coalesce--Results] [GOOD] >> test.py::test[column_group-groups-lookup-Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--Results] [SKIPPED] >> test.py::test[column_group-hint_unk_col_fail--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--Results] [SKIPPED] >> test.py::test[column_group-publish-perusage-Results] [SKIPPED] >> test.py::test[column_order-align_publish_native--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client7-year Uint32-False] [GOOD] >> test.py::test[union_all-infer_3-default.txt-Results] [GOOD] >> test.py::test[view-file_inner--Results] >> test.py::test[ql_filter-integer_single--Results] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] >> test.py::test[blocks-bitcast_scalar--Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--ForceBlocks] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[join-left_trivial--ForceBlocks] [GOOD] >> test.py::test[join-left_trivial--Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] >> test.py::test[aggregate-agg_filter_pushdown--Results] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] >> test.py::test[type_v3-mixed_with_columns--Results] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] >> test.py::test[limit-empty_sort_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v2-client0] [GOOD] >> test.py::test[join-lookupjoin_semi--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi--Results] >> 
test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] >> test.py::test[select-match_clause--ForceBlocks] [GOOD] >> test.py::test[select-match_clause--Results] >> test.py::test[pg-tpcds-q96-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q96-default.txt-Results] >> test.py::test[join-pullup_renaming-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_renaming-off-Results] [SKIPPED] >> test.py::test[join-right_trivial--ForceBlocks] >> test.py::test[pg-tpcds-q55-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-ForceBlocks] >> test.py::test[join-anyjoin_merge_nodup-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in-off-ForceBlocks] >> test.py::test[join-left_trivial--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client8-year Int64-False] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False] >> test.py::test[pg-tpcds-q26-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q26-default.txt-Results] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[pg-tpcds-q96-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q03-default.txt-ForceBlocks] >> test.py::test[select-match_clause--Results] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] >> test.py::test[view-file_inner--Results] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format >> test.py::test[pg-tpcds-q26-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1--Results] [SKIPPED] >> test.py::test[hor_join-group_yamr--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] [GOOD] >> test.py::test[sampling-insert--Results] >> test.py::test[optimizers-sort_constraint_in_left--ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_constraint_in_left--Results] >> test.py::test[join-mapjoin_left_null_column--Results] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] >> test.py::test[blocks-combine_all_avg_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg_filter--Results] >> 
test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] >> test.py::test[udf-udaf_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[udf-udaf_lambda-default.txt-Results] >> test.py::test[column_order-align_publish_native--Results] [GOOD] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-Results] >> test.py::test[order_by-SortByOneField--Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client9-year Uint64-False] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client10-year String NOT NULL-True] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test.py::test[join-right_trivial--ForceBlocks] [GOOD] >> test.py::test[join-right_trivial--Results] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] >> test.py::test[udf-udaf_lambda-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-ForceBlocks] >> test.py::test[select-one_labeled_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Results] >> test.py::test[blocks-combine_all_avg_filter--Results] [GOOD] >> test.py::test[blocks-complex_scalars--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v1-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test.py::test[join-bush_dis_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Results] [SKIPPED] >> test.py::test[join-force_merge_join-default.txt-ForceBlocks] >> test.py::test[aggregate-disable_blocks_with_spilling--ForceBlocks] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling--Results] [SKIPPED] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-ForceBlocks] >> test.py::test[optimizers-sort_constraint_in_left--Results] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage--ForceBlocks] |93.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> 
test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] >> test.py::test[join-lookupjoin_inner_1o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_sharded-default.txt-ForceBlocks] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] [GOOD] >> test.py::test[hor_join-group_yamr--Results] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] >> test.py::test[hor_join-less_outs--Results] [SKIPPED] >> test.py::test[insert-append_missing_null-default.txt-Results] >> test.py::test[join-lookupjoin_semi_1o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client10-year String NOT NULL-True] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client11-year String-False] >> test.py::test[pg-tpcds-q44-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-Results] >> test.py::test[select-one_labeled_column-default.txt-Results] [GOOD] >> test.py::test[tpch-q7-default.txt-ForceBlocks] >> test.py::test[limit-yql-9617_empty_lambda-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_where-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_ic_disconnection [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3602399) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.3%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] >> test.py::test[join-right_trivial--Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--ForceBlocks] >> test.py::test[pg-tpcds-q83-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] >> test.py::test[sampling-insert--Results] [GOOD] >> test.py::test[schema-def_values--Results] >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> test.py::test[order_by-literal_desc--Results] [SKIPPED] >> test.py::test[order_by-literal_with_assume_desc--Results] [SKIPPED] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] |93.3%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_size_limit.py::TestS3::test_size_limit[v2-client0-500-kikimr_params2-true] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c82/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c82/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_size_limit/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3542707) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3544780 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] [GOOD] >> test.py::test[window-full/syscolumns--Results] >> test.py::test[pg-tpch-q03-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q03-default.txt-Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.csv-csv_with_names] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-ForceBlocks] |93.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] >> test.py::test[optimizers-yql-6133_skip_deps--ForceBlocks] >> test.py::test[optimizers-yql-6133_skip_deps--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps--Results] [SKIPPED] >> test.py::test[order_by-SortByOneField--ForceBlocks] >> test.py::test[optimizers-yql-5833-table_content--Results] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] 
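The "subprocess 3544780 is still running" ResourceWarning above is emitted when a Popen object is garbage-collected before its child has been reaped. A hypothetical teardown helper that avoids it is sketched below; stop_child and its timeout are illustrative names, not part of the test suite.

    import subprocess

    def stop_child(process: subprocess.Popen, timeout: float = 10.0) -> None:
        # Terminate and reap the child so the Popen object is never collected
        # while the process is still running.
        process.terminate()
        try:
            process.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            process.kill()
            process.wait()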
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_many_partitions [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3600543) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client11-year String-False] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client12-year Utf8-False] >> test.py::test[blocks-complex_scalars--ForceBlocks] [GOOD] >> test.py::test[blocks-complex_scalars--Results] >> test.py::test[join-force_merge_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-force_merge_join-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yds_bindings.py::TestBindings::test_raw_empty_schema_binding[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3591908) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:16] send response localhost:27895/?database=local ::1 - - [05/May/2025 09:55:16] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[insert-append_missing_null-default.txt-Results] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Results] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] >> test.py::test[join-mapjoin_sharded-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Results] >> test.py::test[pg-tpch-q03-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-duplicated_rowspec--ForceBlocks] >> test.py::test[join-mapjoin_with_empty_struct-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group--ForceBlocks] >> test.py::test[optimizers-yql-5978_fill_multi_usage--ForceBlocks] [GOOD] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-insert-row_spec-Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.tsv-tsv_with_names] [GOOD] >> 
test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_num_access--ForceBlocks] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-Results] >> test.py::test[schema-limit_directread--Results] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_time_order_recoverer[v1-kikimr0] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Results] >> test.py::test[blocks-complex_scalars--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--ForceBlocks] >> test.py::test[pg-tpcds-q53-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-Results] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_sample-default.txt-Results] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--ForceBlocks] >> test.py::test[join-force_merge_join-default.txt-Results] [GOOD] >> test.py::test[join-join_comp_common_table-off-ForceBlocks] >> test.py::test[join-star_join_inners_vk_sorted--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted--Results] >> test_s3_0.py::TestS3::test_checkpoints_on_join_s3_with_yds[v1-mvp_external_ydb_endpoint0-client0] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client12-year Utf8-False] [GOOD] >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client13-year Date-False] >> test.py::test[order_by-SortByOneField--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneField--Results] >> test.py::test[tpch-q7-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case1[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3607454) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[tpch-q7-default.txt-Results] >> test.py::test[pg-tpcds-q53-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-ForceBlocks] |93.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] >> test.py::test[insert-literals_to_string-default.txt-Results] [GOOD] >> test.py::test[insert-part_sortness--Results] |93.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] >> test.py::test[pg_duplicated-duplicated_rowspec--ForceBlocks] [GOOD] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-ForceBlocks] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test.py::test[schema-limit_directread--Results] [GOOD] >> test.py::test[schema-select_all-row_spec-Results] >> test.py::test[schema-insert-row_spec-Results] [GOOD] >> test.py::test[schema-insert_sorted-schema-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-false-client13-year Date-False] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--ForceBlocks] [GOOD] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[join-opt_on_opt_side_with_group--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client0-year Int32 NOT NULL-False] >> test.py::test[tpch-q7-default.txt-Results] [GOOD] >> test.py::test[udf-regexp_udf--ForceBlocks] >> test.py::test[pg_duplicated-duplicated_rowspec--Results] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] >> test.py::test[order_by-order_by_tuple-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-ForceBlocks] >> test.py::test[join-star_join_inners_vk_sorted--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-ForceBlocks] >> test.py::test[weak_field-weak_field_num_access--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Results] >> test.py::test[column_order-select_sample-default.txt-Results] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-Results] >> test.py::test[blocks-decimal_op_decimal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] >> test_format_setting.py::TestS3::test_date_time_simple_iso_insert[v2-date_time/simple_iso/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] 
>> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] [GOOD] >> test.py::test[order_by-native_desc_sort_calc--Results] [SKIPPED] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_validation.py::TestS3::test_nested_type[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_validation/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd3/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_validation/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3423753) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3428541 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggregate-group_by_hop_compact--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Results] >> test.py::test[column_group-hint-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_unk_col_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_unk_col_fail--Results] [SKIPPED] >> test.py::test[column_order-union_all-default.txt-ForceBlocks] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] >> test.py::test[aggregate-group_by_hop_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--ForceBlocks] >> test.py::test[insert-part_sortness--Results] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--ForceBlocks] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format [GOOD] >> 
test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] >> test.py::test[schema-select_all-row_spec-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client0-year Int32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client1-year Uint32 NOT NULL-False] >> test.py::test[join-opt_on_opt_side_with_group--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-ForceBlocks] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--ForceBlocks] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] >> test.py::test[pg-tpcds-q57-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-Results] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] >> test.py::test[schema-insert_sorted-schema-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] >> test.py::test[window-full/syscolumns--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test.py::test[pg_catalog-lambda--Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull--Results] [SKIPPED] >> test.py::test[produce-fuse_reduces_with_presort--Results] |93.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test.py::test[udf-regexp_udf--ForceBlocks] [GOOD] >> test.py::test[udf-regexp_udf--Results] >> test.py::test[action-action_eval_cluster_use--ForceBlocks] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test.py::test[insert-trivial_select-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type-off-Results] [SKIPPED] >> 
test.py::test[join-mergejoin_big_primary_unique-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] [GOOD] >> test.py::test[order_by-singular-default.txt-ForceBlocks] >> test.py::test[pg-doubles_search_path-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client1-year Uint32 NOT NULL-False] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client2-year Uint64 NOT NULL-False] >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-ForceBlocks] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] [GOOD] >> test.py::test[insert-yql-13083--Results] >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] [GOOD] >> test.py::test[column_order-union_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-union_all-default.txt-Results] >> test.py::test[schema-select_all-yamred_dsv-Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Results] [SKIPPED] >> test.py::test[join-yql-16011--ForceBlocks] [SKIPPED] >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[udf-regexp_udf--Results] [GOOD] >> test.py::test[union_all-union_all_multiin--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] >> test.py::test[aggregate-group_by_hop_only--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> test.py::test[join-join_comp_common_table-off-ForceBlocks] [GOOD] >> 
test.py::test[join-join_comp_common_table-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o--ForceBlocks] |93.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] [GOOD] >> test.py::test[schema-select_all-schema-Results] >> test.py::test[column_order-union_all-default.txt-Results] [GOOD] >> test.py::test[expr-langver--ForceBlocks] >> test.py::test[window-win_func_aggr_4func_no_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] [GOOD] >> test.py::test[order_by-changed_sort_with_limit--ForceBlocks] [SKIPPED] >> test.py::test[order_by-changed_sort_with_limit--Results] [SKIPPED] >> test.py::test[pg-join_using_tables3-default.txt-ForceBlocks] >> test.py::test[join-opt_on_opt_side_with_group-off-ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-off-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client2-year Uint64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client3-year Date NOT NULL-False] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] >> test.py::test[produce-reduce_multi_in_keytuple--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[schema-select_all-yamred_dsv-Results] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Results] >> test.py::test[action-action_eval_cluster_use--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_use--Results] >> test.py::test[insert-trivial_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_select-default.txt-Results] >> test.py::test[blocks-interval_add_date_scalar--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--ForceBlocks] >> test.py::test[blocks-interval_add_date_scalar--Results] >> test.py::test[order_by-singular-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-singular-default.txt-Results] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] >> test.py::test[column_order-select_win_func-default.txt-Results] [GOOD] >> test.py::test[column_order-union_all_positional_unordered_fail--Results] >> test.py::test[pg-tpcds-q62-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-ForceBlocks] >> test.py::test[schema-select_all-schema-Results] [GOOD] >> test.py::test[union_all-union_all_multiin--ForceBlocks] 
[GOOD] >> test.py::test[schema-select_all_inferschema2--Results] >> test.py::test[union_all-union_all_multiin--Results] >> test.py::test[pg-join_using_tables1-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-Results] >> test.py::test[insert-trivial_select-default.txt-Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client3-year Date NOT NULL-False] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client4-year Utf8 NOT NULL-False] >> test.py::test[action-action_eval_cluster_use--Results] [GOOD] >> test.py::test[action-eval_drop--ForceBlocks] >> test.py::test[insert-yql-13083--Results] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in_in--Results] >> test.py::test[expr-langver--ForceBlocks] [GOOD] >> test.py::test[expr-langver--Results] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-strict_keys--Results] >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-ForceBlocks] >> test.py::test[order_by-singular-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Results] >> test.py::test[pg-aggregate_combine--ForceBlocks] >> test.py::test[pg-join_using_tables3-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Results] >> test.py::test[order_by-order_with_null-default.txt-Results] [GOOD] >> test.py::test[order_by-union_all--Results] >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] >> test.py::test[column_group-groups-lookup-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-lookup-Results] >> test.py::test[column_group-groups-lookup-Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-ForceBlocks] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last--Results] >> test.py::test[union_all-union_all_multiin--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--ForceBlocks] |93.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-langver--Results] [GOOD] >> test.py::test[file-file_constness--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] >> test.py::test[column_order-union_all_positional_unordered_fail--Results] [GOOD] >> test.py::test[count-count--Results] >> test.py::test[blocks-interval_add_date_scalar--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval--ForceBlocks] >> 
test.py::test[action-combine_subqueries_with_table_param-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] >> test.py::test[schema-select_all_inferschema-extra_field-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_limit--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] >> test.py::test[join-strict_keys--Results] [GOOD] >> test.py::test[join-yql-4275--ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client4-year Utf8 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client5-year Int64 NOT NULL-False] >> test.py::test[join-lookupjoin_semi_2o--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--ForceBlocks] >> test.py::test[join-premap_merge_with_remap-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_with_remap-off-Results] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-ForceBlocks] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-ForceBlocks] >> test.py::test[pg-select_limit-default.txt-Results] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case2[v1] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix[v2-common/simple_posix/test.parquet-parquet] [GOOD] >> test.py::test[action-eval_drop--ForceBlocks] [GOOD] >> test.py::test[action-eval_drop--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] >> test.py::test[schema-select_all_inferschema2--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Results] [GOOD] >> test.py::test[pg-nulls_native-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q06-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] >> test.py::test[pg-aggregate_combine--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine--Results] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client6-year Int32-False] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] >> 
test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[file-file_constness--ForceBlocks] [GOOD] >> test.py::test[file-file_constness--Results] >> test.py::test[weak_field-optimize_weak_fields_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_simple--ForceBlocks] >> test.py::test[blocks-interval_sub_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] >> test.py::test[action-eval_drop--Results] [GOOD] >> test.py::test[action-pending_arg_fail--ForceBlocks] >> test.py::test[schema-select_all_inferschema_limit--Results] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). |93.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Results] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_optional_null--ForceBlocks] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test.py::test[join-yql-4275--ForceBlocks] [GOOD] >> test.py::test[join-yql-4275--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_stop_start_with_filter [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3584632) is multi-threaded, use of fork() may lead to deadlocks in the child. 
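The DeprecationWarning raised from contrib/python/ydb/py3/ydb/types.py:59 above already names its replacement. A minimal before/after sketch (arbitrary timestamp value; datetime.UTC requires Python 3.11+):

    import datetime

    timestamp = 1714903200  # arbitrary example value

    # Deprecated: returns a naive datetime implicitly in UTC.
    naive = datetime.datetime.utcfromtimestamp(timestamp)

    # Replacement named in the warning: a timezone-aware datetime in UTC.
    aware = datetime.datetime.fromtimestamp(timestamp, datetime.UTC)

    assert aware.replace(tzinfo=None) == naive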
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[action-eval_if_guard-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test.py::test[window-win_func_over_group_by_list_names--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v1] [GOOD] >> test.py::test[pg-aggregate_combine--Results] [GOOD] >> test.py::test[pg-select_starref1-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_state_load_mode[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3598330) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.tsv-tsv_with_names] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Results] [SKIPPED] >> test.py::test[join-star_join_semionly--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] >> test.py::test[file-file_constness--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_usage--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage--Results] [SKIPPED] >> test.py::test[in-basic_in-default.txt-ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] [GOOD] >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] >> test.py::test[action-pending_arg_fail--ForceBlocks] [GOOD] >> test.py::test[action-pending_arg_fail--Results] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-ForceBlocks] >> test.py::test[blocks-interval_sub_interval--Results] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-ForceBlocks] >> test.py::test[pg-select_table2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-Results] |93.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt-Results] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-ForceBlocks] >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] >> test.py::test[column_order-join_nosimple--ForceBlocks] [SKIPPED] >> 
test.py::test[column_order-join_nosimple--Results] [SKIPPED] >> test.py::test[distinct-distinct_count_only-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client7-year Uint32-False] >> test_stop.py::TestStop::test_stop_query[v1-streaming] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.json-json_each_row] [GOOD] >> test.py::test[join-yql-4275--Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_no_infer--Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_restart_compute_node [GOOD] >> test.py::test[join-equi_join_three_simple--ForceBlocks] [GOOD] |93.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[join-equi_join_three_simple--Results] >> test.py::test[pg-nulls_native-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-nulls_native-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_ydb_over_fq.py::TestYdbOverFq::test_insert_data_query[v1-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cc6/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cc6/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_ydb_over_fq/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3446891) is multi-threaded, use of fork() may lead to deadlocks in the child. 
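The 'warn' DeprecationWarning reported for ydb/tests/library/sqs/requests_client.py above refers to the legacy Logger.warn alias. A minimal sketch of the supported spelling, using lazy %-formatting instead of str.format (the logger name and the code/reason/text values are hypothetical placeholders):

    import logging

    logger = logging.getLogger("sqs.requests_client")  # hypothetical logger name

    code, reason, text = 400, "ThrottlingException", "Too many requests"  # placeholder values

    # Deprecated alias that triggers the warning:
    # logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format(code, reason, text))

    # Supported method; the arguments are only formatted if the WARNING level is enabled:
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'", code, reason, text)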
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3450717 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[ql_filter-integer_optional_null--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_optional_null--Results] |93.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[pg-select_starref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] >> test.py::test[action-subquery_opt_args-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client7-year Uint32-False] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client8-year Int64-False] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] >> test.py::test[join-mergejoin_with_table_range--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test.py::test[join-yql-8131-off-Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] >> test.py::test[coalesce-coalesce_sugar-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [GOOD] >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> test.py::test[key_filter-empty_range_over_dynamic--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v1-common/simple_posix/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many--Results] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-ForceBlocks] >> test.py::test[action-subquery_merge1-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] >> test.py::test[join-star_join_semionly--ForceBlocks] [GOOD] >> test.py::test[join-star_join_semionly--Results] >> test.py::test[aggregate-group_by_expr_and_having--Results] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-Results] >> test.py::test[ql_filter-integer_optional_null--Results] [GOOD] >> test.py::test[ql_filter-integer_single_equals--ForceBlocks] >> test.py::test[aggregate-group_by_expr_with_join--Results] >> 
test.py::test[pg-tpcds-q12-default.txt-ForceBlocks] >> test.py::test[in-basic_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-basic_in-default.txt-Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[action-subquery_opt_args-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_no_infer--Results] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] >> test.py::test[distinct-distinct_count_only-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Results] >> test.py::test[pg-nulls_native-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[coalesce-coalesce_sugar-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[action-subquery_merge1-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] >> test.py::test[join-equi_join_three_simple--Results] [GOOD] >> test.py::test[join-equi_join_three_simple-off-ForceBlocks] >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] >> test.py::test[key_filter-dict_contains_optional--ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] >> test.py::test[in-basic_in-default.txt-Results] [GOOD] >> test.py::test[in-yql-10038-default.txt-ForceBlocks] >> test.py::test[tpch-q4-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client8-year Int64-False] [GOOD] >> 
test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.csv-csv_with_names] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client9-year Uint64-False] >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] |93.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[distinct-distinct_count_only-default.txt-Results] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] >> test.py::test[distinct-distinct_join-default.txt-ForceBlocks] >> test.py::test[join-star_join_semionly--Results] [GOOD] >> test.py::test[key_filter-yql-19420--ForceBlocks] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Results] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_single_equals--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single_equals--Results] >> test.py::test[pg-tpcds-q12-default.txt-ForceBlocks] [GOOD] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] >> test.py::test[pg-tpcds-q12-default.txt-Results] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[pg-tpcds-q10-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q27-default.txt-Results] >> test.py::test[count-count--Results] [GOOD] >> test.py::test[count-count_all_grouped--Results] >> test.py::test[schema-user_schema_patch_columns--Results] [GOOD] >> test.py::test[select-result_rows_limit--Results] [SKIPPED] >> test.py::test[select-substring-default.txt-Results] >> test.py::test[window-win_func_over_group_by_list_names--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] >> test.py::test[pg-select_qstarref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client9-year Uint64-False] [GOOD] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client10-year String NOT NULL-True] >> test.py::test[ql_filter-integer_single_equals--Results] [GOOD] >> test.py::test[result_types-data-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.tsv-tsv_with_names] [GOOD] |93.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] [GOOD] >> 
test.py::test[window-win_func_part_by_expr_new-default.txt-ForceBlocks] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-ForceBlocks] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] |93.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[join-mergejoin_force_one_sorted-off-Results] [SKIPPED] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[join-equi_join_three_simple-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_simple-off-Results] [SKIPPED] >> test.py::test[join-full_equal_null--ForceBlocks] >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_table2-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client10-year String NOT NULL-True] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client11-year String-False] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] >> test.py::test[in-yql-10038-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-yql-10038-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error >> test.py::test[join-opt_on_opt_side_with_group--Results] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1--Results] >> test.py::test[aggregate-group_by_expr_with_join--Results] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_mz.py::TestRecovery::test_recovery[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3595053) is multi-threaded, use of fork() may lead to deadlocks in the 
child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[aggr_factory-bitor-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[distinct-distinct_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-19420--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-19420--Results] >> test.py::test[distinct-distinct_join-default.txt-Results] >> test.py::test[key_filter-string_with_legacy--Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-finished] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-ForceBlocks] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] >> test.py::test[select-opt_list_access-default.txt-Results] [GOOD] >> test.py::test[select-refselect--Results] [SKIPPED] >> test.py::test[select-result_size_limit--Results] >> test.py::test[pg-tpcds-q27-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] >> test.py::test[select-substring-default.txt-Results] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Results] >> test.py::test[tpch-q4-default.txt-Results] [GOOD] >> test.py::test[tpch-q7-default.txt-Results] |93.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[select-result_size_limit--Results] [SKIPPED] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client11-year String-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client12-year Utf8-False] |93.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_join--ForceBlocks] [SKIPPED] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] >> 
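The fork() DeprecationWarning that keeps appearing in these test command outputs (multiprocessing/popen_fork.py:66) is raised when a multi-threaded process forks. One way to avoid it is to request the spawn start method, which launches a fresh interpreter instead of forking the multi-threaded parent; a minimal sketch under that assumption (the worker function and pool size are placeholders, not part of the test harness):

    import multiprocessing as mp

    def worker(n: int) -> int:
        # trivial placeholder task
        return n * n

    if __name__ == "__main__":
        # spawn starts a new interpreter rather than fork()ing the current process
        ctx = mp.get_context("spawn")
        with ctx.Pool(processes=2) as pool:
            print(pool.map(worker, range(4)))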
test.py::test[flatten_by-flatten_expr_join--Results] >> test.py::test[flatten_by-flatten_expr_join--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_mode-default.txt-ForceBlocks] >> test.py::test[count-count_all_grouped--Results] [GOOD] >> test.py::test[distinct-distinct_and_join--Results] >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [GOOD] >> test.py::test[in-yql-10038-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta_with_read_udf_fail--ForceBlocks] >> test.py::test[pg-select_table2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] >> test.py::test[key_filter-yql-19420--Results] [GOOD] >> test.py::test[lambda-lambda_udf--ForceBlocks] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--ForceBlocks] >> test.py::test[produce-fuse_reduces_with_presort--Results] [GOOD] >> test.py::test[produce-process_multi_in_single_out--Results] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt-Results] >> test_format_setting.py::TestS3::test_date_time_simple_posix_insert[v2-common/simple_posix/test.parquet-parquet] [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-ForceBlocks] >> test.py::test[join-full_equal_null--ForceBlocks] [GOOD] >> test.py::test[join-full_equal_null--Results] >> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client12-year Utf8-False] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_union_all-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client13-year Date-False] >> test.py::test[insert-keepmeta_with_read_udf_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-ForceBlocks] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[pg-tpcds-q23-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-Results] >> test.py::test[join-premap_merge_extrasort1--Results] [GOOD] >> test.py::test[join-pullup_extend--Results] >> test.py::test[pg-select_table2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_start--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-Results] >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] >> 
test.py::test[pg-tpcds-q53-default.txt-Results] >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> test.py::test[result_types-data-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-data-default.txt-Results] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] >> test.py::test[join-full_equal_null--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat--ForceBlocks] >> test.py::test[flatten_by-flatten_mode-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v2-true-client13-year Date-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] >> test.py::test[produce-process_with_python-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt-Results] >> test.py::test[lambda-lambda_udf--ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_udf--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/typing.py:395: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/threading.py:135: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/threading.py:135: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] >> test.py::test[key_filter-yql-8663-dedup_ranges--ForceBlocks] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] >> test.py::test[pg-tpcds-q23-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-ForceBlocks] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_stop.py::TestStop::test_stop_query[v1-analytics] >> 
test.py::test[flatten_by-flatten_mode-default.txt-Results] [GOOD] >> test.py::test[in-in_compact_distinct-empty-ForceBlocks] >> test.py::test[insert-trivial_literals-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt-Results] [GOOD] >> test.py::test[select-sum_to_string-default.txt-Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case3[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3620509) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> test.py::test[distinct-distinct_union_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] >> test.py::test[lambda-lambda_udf--Results] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_union_all-default.txt-Results] >> test.py::test[result_types-data-default.txt-Results] [GOOD] >> test.py::test[sampling-reduce_with_presort--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-ForceBlocks] >> test.py::test[sampling-reduce_with_presort--Results] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client0-year Int32 NOT NULL-False] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_and_join--Results] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] >> test.py::test[pg-tpcds-q45-default.txt-Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] >> test.py::test[insert-trivial_literals-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-to_empty--ForceBlocks] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 >> test.py::test[tpch-q7-default.txt-Results] [GOOD] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] >> 
test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--ForceBlocks] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] [GOOD] >> test.py::test[ytflow-select--ForceBlocks] [SKIPPED] >> test.py::test[ytflow-select--Results] >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects >> test.py::test[ytflow-select--Results] [SKIPPED] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[join-inner_trivial_from_concat--ForceBlocks] [GOOD] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test.py::test[join-inner_trivial_from_concat--Results] >> test.py::test[distinct-distinct_union_all-default.txt-Results] [GOOD] >> test.py::test[hor_join-filters--ForceBlocks] >> test.py::test[produce-reduce_all_expr-default.txt-Results] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[join-pullup_extend--Results] [GOOD] >> test.py::test[join-pullup_inner--Results] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] [GOOD] |93.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[pg-tpcds-q81-default.txt-ForceBlocks] |93.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[sampling-reduce_with_presort--Results] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client1-year Uint32 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_s3_0.py::TestS3::test_double_optional_types_validation[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3508307) is multi-threaded, use of fork() may lead to deadlocks in the child. 
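The unclosed-file ResourceWarnings above (library/recipes/common/__init__.py:29) are raised because the moto_server.out.log / moto_server.err.log handles handed to subprocess.Popen are never closed. A minimal sketch of one way to keep ownership of those handles and close them once the child process is reaped (paths and command line are hypothetical placeholders):

    import subprocess

    out_path = "moto_server.out.log"   # placeholder path
    err_path = "moto_server.err.log"   # placeholder path

    with open(out_path, "w", encoding="utf-8") as out, open(err_path, "w", encoding="utf-8") as err:
        process = subprocess.Popen(["moto_server"], stdout=out, stderr=err)  # placeholder command
        try:
            # ... run tests against the spawned server ...
            pass
        finally:
            process.terminate()
            process.wait()  # also avoids the "subprocess ... is still running" ResourceWarning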
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v2-false-client0/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ca2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_s3_0/testing_out_stuff/test_s3_0.py.TestS3.test_csv.v2-false-client0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3512362 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:55:15] send response localhost:15244/?database=local ::1 - - [05/May/2025 09:55:15] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[select-sum_to_string-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-bottom-default.txt-Results] >> test.py::test[select-table_content_with_tmp_folder--Results] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] |93.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[ytflow-select--Results] [SKIPPED] >> test.py::test[in-in_compact_distinct-empty-ForceBlocks] [GOOD] >> test.py::test[in-in_compact_distinct-empty-Results] >> 
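The ResourceWarning reported for ydb/tests/tools/fq_runner/kikimr_runner.py:184 above comes from a file object created inside a generator expression and never closed. A minimal sketch of the equivalent context-manager form (bill_fname is a placeholder path):

    bill_fname = "metering.bill"  # placeholder path

    # Warns: the handle from open() is left for the garbage collector to close
    # meterings_loaded = sum(1 for _ in open(bill_fname))

    # Closes the file deterministically while counting the lines
    with open(bill_fname, encoding="utf-8") as bill:
        meterings_loaded = sum(1 for _ in bill)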
test.py::test[limit-empty_sort_desc_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] >> test.py::test[pg-tpcds-q34-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats >> test.py::test[join-inner_trivial_from_concat--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat-off-ForceBlocks] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list >> test.py::test[key_filter-yql_5895_or-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> test_ydb_sql.py::TestExecuteSqlWithParams::test_list [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_10_p_19_plus_1[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3619731) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [GOOD] >> test.py::test[udf-udaf_distinct--Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] >> test.py::test[pg-tpcds-q54-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-Results] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view >> 
test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_list_table--Results] >> test.py::test[in-in_compact_distinct-empty-Results] [GOOD] >> test.py::test[insert-append_sorted--ForceBlocks] >> test.py::test[produce-reduce_lambda_list_table--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype--Results] >> test.py::test[insert_monotonic-to_empty--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-to_empty--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client2-year Uint64 NOT NULL-False] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[lineage-nested_lambda_fields-default.txt-ForceBlocks] [SKIPPED] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] >> test.py::test[lineage-nested_lambda_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-ForceBlocks] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |93.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[lineage-select_group_by_key-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Results] >> test.py::test[hor_join-filters--ForceBlocks] [GOOD] >> test.py::test[hor_join-filters--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-extend-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-extend-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--ForceBlocks] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[insert_monotonic-to_empty--Results] [GOOD] >> test.py::test[join-anyjoin_common_nodup-off-ForceBlocks] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] [GOOD] >> 
test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_only-default.txt-Results] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[limit-zero_limit-default.txt-Results] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats [GOOD] >> test_metrics_cleanup.py::TestCleanup::test_cleanup[v1] [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats >> test.py::test[aggr_factory-bottom-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q81-default.txt-ForceBlocks] [GOOD] |93.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[join-pullup_inner--Results] [GOOD] >> test.py::test[join-pullup_inner-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber--Results] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats [GOOD] >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir >> test.py::test[join-inner_trivial_from_concat-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial_from_concat-off-Results] >> test.py::test[hor_join-filters--Results] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client3-year Date NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] >> test.py::test[join-inner_trivial_from_concat-off-Results] [SKIPPED] >> test.py::test[join-join_without_column--ForceBlocks] >> test.py::test[hor_join-fuse_multi_usage-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] |93.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct--ForceBlocks] >> test.py::test[aggregate-group_by_hop_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct--Results] >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [GOOD] |93.8%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test.py::test[aggregate-group_by_hop_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_list_key--ForceBlocks] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3602099) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:56:13] send response localhost:22596/?database=local ::1 - - [05/May/2025 09:56:13] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] >> test.py::test[pg-tpcds-q38-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] |93.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client4-year Utf8 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] |93.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test_ydb_backup.py::TestBackupSingle::test_single_table_backup [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test.py::test[distinct-distinct_count_only-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt-Results] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json >> 
test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-ForceBlocks] >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_constraint_in_left--Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] >> test.py::test[udf-udaf_distinct--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test.py::test[join-join_without_column--ForceBlocks] [GOOD] >> test.py::test[join-join_without_column--Results] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client5-year Int64 NOT NULL-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] [GOOD] >> test.py::test[window-win_with_cur_row--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). 
>> test_metrics_cleanup.py::TestCleanup::test_keep[v1] >> test.py::test[join-pullup_rownumber--Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] >> test.py::test[aggregate-group_by_hop_list_key--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_zero_delay--ForceBlocks] >> test.py::test[join-join_without_column--Results] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client6-year Int32-False] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-ForceBlocks] >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_view_json [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 >> test.py::test[join-anyjoin_common_nodup-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodup-off-Results] >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/py3test >> test_split_merge.py::TestSplitMerge::test_merge_split[table_all_types-pk_types12-all_types12-index12---] [GOOD] |93.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_recursive_remove.py::TestRecursiveRemove::test_various_scheme_objects [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> 
test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] |94.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> test.py::test[insert-append_sorted--ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted--Results] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-Results] >> test.py::test[join-star_join_semionly_premap-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren--Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint32 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string |94.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[join-anyjoin_common_nodup-off-Results] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] |94.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] |94.0%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_ydb_table.py::TestExecuteQueryWithParams::test_uint64_and_string [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[table_range-range_over_regexp--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_list [GOOD] >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct >> test.py::test[distinct-distinct_groupby-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_join-default.txt-Results] >> test.py::test[aggregate-group_by_hop_zero_delay--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_zero_delay--Results] >> test.py::test[aggregate-group_by_hop_zero_delay--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--ForceBlocks] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client8-year Int64-False] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_struct [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters >> test.py::test[optimizers-sort_constraint_in_left--Results] [GOOD] |94.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[pg-tpcds-q89-default.txt-ForceBlocks] [GOOD] |94.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] >> test_ydb_table.py::TestExecuteQueryWithParams::test_scan_query_with_parameters [GOOD] >> test.py::test[insert-append_sorted--Results] [GOOD] >> test.py::test[insert-keepmeta_nonstrict_fail--ForceBlocks] >> test.py::test[window-win_with_cur_row--Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_scheme_error [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3587910) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |94.1%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[join-join_without_correlation_and_struct_access-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o--ForceBlocks] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] >> test.py::test[insert-keepmeta_nonstrict_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_nonstrict_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--ForceBlocks] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] >> test.py::test[table_range-range_over_regexp--Results] [GOOD] >> test.py::test[table_range-range_slash--Results] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client9-year Uint64-False] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[join-trivial_view-off-Results] >> test.py::test[insert-keepmeta_proto_fail--ForceBlocks] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-row] [GOOD] >> test.py::test[join-trivial_view-off-Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] >> test.py::test[insert-keepmeta_proto_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_view_fail--ForceBlocks] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] |94.1%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client10-year String NOT NULL-True] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] >> test.py::test[insert-keepmeta_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_view_fail--Results] [GOOD] >> test.py::test[insert-override_view_fail--ForceBlocks] >> test.py::test[join-lookupjoin_inner_1o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Results] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test.py::test[table_range-range_slash--Results] [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromJson::test_script_from_file [GOOD] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-row] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 09:55:32] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 09:55:34] "GET /foo.txt HTTP/1.1" 200 - >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client11-year String-False] [GOOD] >> test.py::test[insert-override_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-override_view_fail--Results] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year 
Utf8-False] >> test.py::test[insert-override_view_fail--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup--ForceBlocks] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v1-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] |94.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] |94.1%| [TA] $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scheme.py::TestSchemeDescribe::test_describe_external_table_references_json [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] >> test.py::test[join-lookupjoin_inner_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] [GOOD] >> test.py::test[window-generic/session_aliases--Results] >> test_ydb_impex.py::TestImpex::test_simple[json-additional_args4-column] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client12-year Utf8-False] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] >> test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Results] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] [GOOD] >> test.py::test[join-yql-19081--Results] >> test.py::test[join-yql-19081--Results] [SKIPPED] >> test.py::test[join-yql-8125--Results] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-ForceBlocks] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> 
test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] >> test.py::test[aggregate-group_by_session_nopush--ForceBlocks] >> test.py::test[aggregate-group_by_session_nopush--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_session_nopush--Results] >> test.py::test[aggregate-group_by_session_nopush--Results] [SKIPPED] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |94.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] |94.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[distinct-distinct_join-default.txt-Results] [GOOD] |94.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[aggregate-group_by_session_nopush--Results] [SKIPPED] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-false-client13-year Date-False] [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] >> test_stop.py::TestStop::test_stop_query[v1-analytics] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] |94.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] |94.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> 
test_ydb_backup.py::TestBackupSingleNotNull::test_single_table_backup [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test.py::test[join-anyjoin_common_dup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_dup--Results] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client0-year Int32 NOT NULL-False] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/inspect.py:3076: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] >> test_ydb_impex.py::TestImpex::test_delimeter_at_end_of_lines[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] >> 
test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] [GOOD] |94.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] |94.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args0-column] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.csv-csv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.4%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log} >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client1-year Uint32 NOT NULL-False] [GOOD] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_pretty [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] |94.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array |94.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[aggregate-group_by_rollup_column_ref--Results] [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/multinode/py3test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_SECONDS] [GOOD] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args2-column] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[join-yql-8125-off-Results] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_yql_script_json_unicode_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test.py::test[join-yql-8125-off-Results] [SKIPPED] >> test.py::test[key_filter-between_with_key_filter--Results] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_actorsystem.py::TestWithStorageNodeWith16Cpu::test >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_pretty [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client2-year Uint64 NOT NULL-False] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array |94.5%| [TA] $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_base64_array [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode >> test_ydb_impex.py::TestImpex::test_excess_columns[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] >> test.py::test[join-anyjoin_common_dup--Results] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-row] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode [GOOD] >> test.py::test[join-anyjoin_common_dup-off-ForceBlocks] >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array >> test_ydb_impex.py::TestImpex::test_excess_columns[json-additional_args4-column] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithFormats::test_stream_yql_script_json_unicode_array [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MICROSECONDS] [GOOD] |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_stop.py::TestStop::test_stop_query[v1-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3612455) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] >> test_actorsystem.py::TestWithStorageNodeWith16Cpu::test [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] [GOOD] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--ForceBlocks] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client3-year Date NOT NULL-False] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-row] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array >> test_ydb_impex.py::TestImpex::test_stdin[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.tsv-tsv_with_names-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_json_unicode_array [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_csv [GOOD] >> test.py::test[window-udaf_window--Results] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_data_query_tsv [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client4-year Utf8 NOT NULL-False] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_SECONDS] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_json_unicode_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv |94.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv >> test_ydb_table.py::TestExecuteQueryWithFormats::test_scan_query_tsv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_pretty [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 >> test_ydb_impex.py::TestImpex::test_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64 [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array >> 
test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_base64_array [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array >> test.py::test[aggregate-aggregation_with_named_node--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_json_unicode_array [GOOD] |94.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/split_merge/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MICROSECONDS] [GOOD] >> test_ydb_impex.py::TestImpex::test_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_csv [GOOD] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] >> test_ydb_table.py::TestExecuteQueryWithFormats::test_read_table_tsv [GOOD] >> test.py::test[join-anyjoin_common_dup-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_dup-off-Results] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] >> test.py::test[join-anyjoin_common_dup-off-Results] [SKIPPED] >> test.py::test[join-inner_grouped-off-ForceBlocks] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client5-year Int64 NOT NULL-False] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] >> test_recovery.py::TestRecovery::test_program_state_recovery |94.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-row] [GOOD] >> 
test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] >> test.py::test[aggregate-aggregation_with_named_node--Results] [GOOD] >> test.py::test[aggregate-compare_by--ForceBlocks] >> test_actorsystem.py::TestWithStorageNodeWith17Cpu::test >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.json-json_each_row-UNIX_TIME_MILLISECONDS] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] >> test.py::test[window-udaf_window--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part--Results] >> test_ydb_impex.py::TestImpex::test_multiple_files[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client6-year Int32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] >> test_row_dispatcher.py::TestPqRowDispatcher::test_metadatafields >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d88/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000d88/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk3/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 
3149994 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_SECONDS] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith17Cpu::test [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> test.py::test[join-inner_grouped-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped-off-Results] >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client7-year Uint32-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] >> test_ydb_impex.py::TestImpex::test_multiple_files[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] >> test_select_1.py::TestSelect1::test_unwrap_null[v1] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] >> test_disposition.py::TestContinueMode::test_disposition_from_time[v1-mvp_external_ydb_endpoint0] |94.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_2_selects_limit.py::TestSelectLimit::test_select_same[v1] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MICROSECONDS] [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] >> test.py::test[window-win_func_aggr_4func_no_part--Results] [GOOD] >> 
test.py::test[window-win_func_aggr_4func_sort_desc--Results] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client8-year Int64-False] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_simple_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[csv-additional_args1-column] [GOOD] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding[v1] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] >> test_2_selects_limit.py::TestSelectLimit::test_select_sequence[v1] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_stdin_par_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] >> test_format_setting.py::TestS3::test_timestamp_unix_time_insert[v2-timestamp/unix_time/test.parquet-parquet-UNIX_TIME_MILLISECONDS] [GOOD] >> test.py::test[aggregate-compare_by--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by--Results] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args2-column] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] >> 
test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_framing_newline_delimited_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD] >> test_mem_alloc.py::TestMemAlloc::test_hop_alloc[v1] [SKIPPED] >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client9-year Uint64-False] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_full_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_raw[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v1] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-row] [GOOD] >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client10-year String NOT NULL-True] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] >> test_statistics.py::TestS3::test_egress[v1-client0-json_list] [GOOD] >> 
test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_batching_adaptive_tsv[sql] [GOOD] >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [SKIPPED] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_stdin[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_json[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] >> test_actorsystem.py::TestWithStorageNodeWith18Cpu::test >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_ignore_excess_parameters_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] >> test_statistics.py::TestS3::test_egress[v1-client0-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_bad_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] >> test.py::test[aggregate-compare_by--Results] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-ForceBlocks] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_csv[sql] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args0-column] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_columns_no_header_tsv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-row] 
[GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_csv[sql] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client11-year String-False] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] >> test_ydb_sql.py::TestExecuteSqlWithParamsFromStdin::test_skip_rows_tsv[sql] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] >> test_select_1.py::TestSelect1::test_unwrap_null[v1] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] >> test_statistics.py::TestS3::test_egress[v1-client0-parquet] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] >> test_actorsystem.py::TestWithStorageNodeWith18Cpu::test [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_missing_fields [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_mem_alloc.py::TestMemAlloc::test_join_alloc[v1] [SKIPPED] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3705128) is multi-threaded, use of fork() may lead to deadlocks in the child. 
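The DeprecationWarning in the "Test command err" output above is raised by CPython's multiprocessing module when fork() is used from an already multi-threaded parent process. A minimal sketch, assuming nothing about the YDB test harness itself, of how a multi-threaded Python program can opt into the "spawn" start method instead; the worker function and pool size are illustrative only:

# Illustrative sketch only: avoids the fork()-in-a-threaded-process
# DeprecationWarning by starting children with "spawn" instead of fork().
# The worker function and pool size are made up for the example.
import multiprocessing as mp

def square(x: int) -> int:
    # Trivial stand-in for real per-process work.
    return x * x

if __name__ == "__main__":
    # "spawn" launches a fresh interpreter per child rather than fork()ing
    # the (possibly multi-threaded) parent, so the warning does not fire.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=2) as pool:
        print(pool.map(square, range(4)))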
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client12-year Utf8-False] [GOOD] >> test_select_1.py::TestSelect1::test_select_z_x_y[v1] [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] >> test_recovery.py::TestRecovery::test_recovery >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] >> test_recovery.py::TestRecovery::test_program_state_recovery [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_ydb_impex.py::TestImpex::test_multiple_files_and_columns_opt[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-analytics-finished] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt-Results] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> test_ydb_backup.py::TestBaseSingleFromDifPlaces::test_single_table_backup_from_different_places [GOOD] >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate >> test_row_dispatcher.py::TestPqRowDispatcher::test_metadatafields [GOOD] >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [GOOD] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] >> test_select_1.py::TestSelect1::test_select_1[v1] >> test.py::test[aggregate-group_by_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-ForceBlocks] >> test_statistics.py::TestS3::test_egress[v2-client0-json_list] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> 
test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] >> test_statistics.py::TestS3::test_egress[v2-client0-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [GOOD] >> test_disposition.py::TestContinueMode::test_disposition_from_time[v1-mvp_external_ydb_endpoint0] [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] |94.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_ru--ForceBlocks] >> test_statistics.py::TestS3::test_egress[v2-client0-csv_with_names] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types >> test_actorsystem.py::TestWithStorageNodeWith19Cpu::test >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_statistics.py::TestS3::test_egress[v2-client0-parquet] [GOOD] >> 
test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] |94.8%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/test-results/pytest/{meta.json ... results_accumulator.log} >> test_row_dispatcher.py::TestPqRowDispatcher::test_sensors [GOOD] >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming[v1] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--Results] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list >> test_actorsystem.py::TestWithStorageNodeWith19Cpu::test [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_struct [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] >> test_format_setting.py::TestS3::test_timestamp_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-streaming] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_script_from_file [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 >> test_select_1.py::TestSelect1::test_select_1[v1] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint32 [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_uint64_and_string [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_list [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct >> test.py::test[aggregate-group_by_mul_gs_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_struct [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_list] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_multiple_files [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_ignore_excess_parameters [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromJson::test_stream_script_from_file [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith1Cpu::test >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [GOOD] >> test_invalid_consumer.py::TestConsumer::test_invalid[v1] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] >> test_ydb_impex.py::TestImpex::test_big_dataset[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] >> test_ydb_sql.py::TestExecuteSqlFromStdinWithWideOutput::test_wide_table [GOOD] |94.8%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[window-win_func_rank_by_all--Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_3_selects.py::TestSelects::test_3_selects[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3702710) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places >> test_select_1.py::TestSelect1::test_select_pg[v1] |94.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_from_binding_date_time[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3693853) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith1Cpu::test [GOOD] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.json-json_each_row] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint32[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_uint64_and_string[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_list[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_struct[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v1-common/simple_format/test.parquet-parquet] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_multiple_files[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[data] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] >> test_select_1.py::TestSelect1::test_select_1[v2] >> test_bad_syntax.py::TestBadSyntax::test_type_as_column[v1] [GOOD] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-simple] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_ignore_excess_parameters[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] [GOOD] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] >> test_ydb_table.py::TestExecuteQueryWithParamsFromJson::test_script_from_file[scan] [GOOD] >> test.py::test_run_determentistic[column] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] >> test_ydb_backup.py::TestRecursiveNonConsistent::test_recursive_table_backup_from_different_places [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_ast_in_failed_query_runtime[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3698409) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_recovery.py::TestRecovery::test_recovery [GOOD] |94.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[aggregate-percentiles_ungrouped--ForceBlocks] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.csv-csv_with_names] [GOOD] >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test.py::test_plans[column] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test.py::test_run_benchmark[generic-row] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_limit.py::TestSelectLimit::test_select_limit[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3702212) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_big_state.py::TestBigState::test_gt_8mb[v1] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.tsv-tsv_with_names] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Results] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_statistics.py::TestS3::test_convert[v1-client0-json_list-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] |94.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test] [GOOD] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |95.0%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_select_1.py::TestSelect1::test_select_pg[v1] [GOOD] >> test_public_metrics.py::TestPublicMetrics::test_select_limit[v1] [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] |95.0%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/py3test >> test_actorsystem.py::TestWithStorageNodeWith20Cpu::test [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.json-json_each_row] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_explicit_partitioning_0.py::TestS3::test_projection_enum_type_invalid_validation[v1-true-client13-year Date-False] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ccc/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ccc/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_explicit_partitioning_0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3438338) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3443600 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |95.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Results] >> test_select_1.py::TestSelect1::test_select_1[v2] [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [GOOD] >> test.py::test_run_determentistic[row] |95.0%| [TA] $(B)/ydb/tests/functional/autoconfig/test-results/py3test/{meta.json ... results_accumulator.log} >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[tsv-additional_args3-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] |95.0%| [TA] {RESULT} $(B)/ydb/tests/functional/autoconfig/test-results/py3test/{meta.json ... results_accumulator.log} |95.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part10/test-results/pytest/{meta.json ... results_accumulator.log} >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable |95.1%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test_run_benchmark[scan-column] |95.1%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] >> test.py::test_plans[row] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-with_created_read_rules-modify-streaming] [GOOD] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_optional [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] >> test.py::test_run_benchmark[scan-row] >> test_format_setting.py::TestS3::test_date_time_simple_format_insert[v2-common/simple_format/test.parquet-parquet] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_z_x_y[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3698174) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD] |95.1%| [TA] $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_filter_use_unsupported_predicate [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3694222) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_posix/big.csv-csv_with_names-POSIX] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test.py::test_run_benchmark[generic-column] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] |95.1%| [TA] {RESULT} $(B)/ydb/tests/functional/suite_tests/test-results/py3test/{meta.json ... results_accumulator.log} >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed >> test.py::test[window-win_multiaggr_list-default.txt-Results] [GOOD] >> test.py::test[window-win_over_few_partitions--Results] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-json_each_row] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] |95.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax [GOOD] >> test.py::test_plans[column] [GOOD] |95.1%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part10/test-results/pytest/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] >> test_select_1.py::TestSelect1::test_select_pg[v2] >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_require_as[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3694979) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |95.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_yq_streaming_read_date_time_format[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3710738) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-analytics] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v1-date_time/simple_iso/big.csv-csv_with_names-ISO] [GOOD] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places >> test_ydb_impex.py::TestImpex::test_big_dataset[json-additional_args4-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_format_parquet[row] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_format_parquet[column] [SKIPPED] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-row] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
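The clickbench "Test command err" block just above reports repeated UserWarnings from the ydb Python SDK (deprecated global behaviour for split transactions and truncated responses). A minimal sketch, using only the standard library and a standard pytest hook, of how such noise could be collapsed to a single report per run; the message pattern is copied from the log text, and the conftest.py placement is an assumption, not this repository's actual configuration:

# conftest.py -- illustrative sketch, not the filter set used by these tests.
# Collapses the ydb SDK deprecation notices seen above to one report each
# while leaving other DeprecationWarnings visible.
import warnings

def pytest_configure(config):
    warnings.filterwarnings(
        "once",
        message=r"Global allow (split transaction|truncated response) is deprecated behaviour\.",
        category=UserWarning,
    )
    # Keep genuine deprecations from the code under test visible.
    warnings.simplefilter("default", DeprecationWarning)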
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_disposition.py::TestContinueMode::test_disposition_oldest[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3702654) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:57:22] send response localhost:15711/?database=local ::1 - - [05/May/2025 09:57:22] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:57:43] send response localhost:15711/?database=local ::1 - - [05/May/2025 09:57:43] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-csv_with_names] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-column] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] >> test.py::test_plans[row] [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case4[v1] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args0-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-row] >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables |95.2%| [TA] $(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:127:2058] recipient: [1:109:2141] 2025-05-05T09:57:49.260136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:49.260163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:49.260169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:49.260175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:49.260181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:49.260185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:49.260194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:49.260208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:49.260294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:49.260364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:49.274625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:49.274660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:49.274753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon 2025-05-05T09:57:49.280069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:49.280150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:49.280184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:49.284759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-05-05T09:57:49.284812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:49.284925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:49.284987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:49.285558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:49.285827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:49.285837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:49.285869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:49.285879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:49.285886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:49.285906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.287075Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2152] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T09:57:49.307463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:49.307547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.307616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:49.307698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:49.307712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.308526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:49.308556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:49.308630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.308640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:49.308645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:49.308651Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:49.309064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.309077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:49.309083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:49.309437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.309448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.309454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:49.309461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:49.310160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:49.310586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:49.310632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:49.310836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:49.310861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:49.310871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:49.310953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:49.310962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:49.310994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:49.311006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:49.311400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:49.311408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-05-05T09:57:49.311457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:49.311462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:49.311541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.311550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:57:49.311563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:49.311568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:49.311575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:49.311579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:49.311584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:57:49.311590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:49.311596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:57:49.311600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:57:49.311611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path ... e 2025-05-05T09:57:54.692079Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-05-05T09:57:54.692111Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000007 2025-05-05T09:57:54.692282Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.692307Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 68719478893 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:54.692317Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000007, at schemeshard: 72057594046678944 2025-05-05T09:57:54.692341Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-05-05T09:57:54.692373Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 REQUEST: PUT /table/metadata.json HTTP/1.1 HEADERS: Host: localhost:65019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
08F79D86-3C09-4FDD-8CAE-E2EC2888ABCD amz-sdk-request: attempt=1 content-length: 73 content-md5: oBd372HtOJ3JW3N2b2gUVA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/metadata.json / / 73 2025-05-05T09:57:54.698545Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:54.698566Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T09:57:54.698670Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.698679Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:208:2210], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-05T09:57:54.698880Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.698892Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:54.699210Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T09:57:54.699235Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T09:57:54.699240Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-05T09:57:54.699246Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-05T09:57:54.699252Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-05T09:57:54.699276Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /table/permissions.pb HTTP/1.1 HEADERS: Host: localhost:65019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01BF98A1-0704-48DD-8D91-6E4092F380DC amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/permissions.pb / / 43 2025-05-05T09:57:54.700430Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 TestWaitNotification wait txId: 1004 2025-05-05T09:57:54.700511Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T09:57:54.700520Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T09:57:54.700617Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, 
txId: 1004, at schemeshard: 72057594046678944 2025-05-05T09:57:54.700624Z node 16 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1004, at schemeshard: 72057594046678944 REQUEST: PUT /table/scheme.pb HTTP/1.1 HEADERS: Host: localhost:65019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 22159036-71CB-4C2B-BA82-8A452F9A0058 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/scheme.pb / / 355 REQUEST: PUT /table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:65019 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 99E10865-582C-40F4-B0F8-29E2B791F0C1 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/data_00.csv / / 0 2025-05-05T09:57:54.705617Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 487 RawX2: 68719479193 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:57:54.705639Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-05T09:57:54.705666Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 487 RawX2: 68719479193 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:57:54.705683Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 487 RawX2: 68719479193 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:57:54.705697Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.705702Z node 16 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.705707Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T09:57:54.705714Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T09:57:54.705762Z node 16 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], 
ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:54.706369Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.706485Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.706497Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T09:57:54.706512Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:57:54.706518Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:57:54.706523Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:57:54.706528Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:57:54.706533Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-05T09:57:54.706554Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:125:2151] message: TxId: 281474976710759 2025-05-05T09:57:54.706567Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:57:54.706573Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T09:57:54.706579Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T09:57:54.706615Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-05T09:57:54.707352Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T09:57:54.707372Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T09:57:54.707384Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:57:54.707390Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-05-05T09:57:54.707396Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710759, id# 1004, itemIdx# 1 2025-05-05T09:57:54.707896Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:57:54.707918Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T09:57:54.707926Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:629:2587] TestWaitNotification: OK eventTxId 1004 >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_posix/big.csv-csv_with_names-POSIX] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-column] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [GOOD] |95.2%| [TA] {RESULT} 
$(B)/ydb/tests/functional/sqs/multinode/test-results/py3test/{meta.json ... results_accumulator.log} >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-row] >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-row] [GOOD] >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:46.466898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:46.466953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:46.466959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:46.466964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:46.466970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:46.466975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:46.466984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:46.466998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:46.467095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:46.467176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-05-05T09:57:46.479554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:46.479581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:46.479679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:46.482028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:46.482076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:46.482107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:46.483436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:46.483506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:46.483631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.483712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:46.484199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.484508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:46.484519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.484569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:46.484576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:46.484583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:46.484610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:46.486232Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:46.506200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" 
Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:46.506305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.506384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:46.506460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:46.506473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.507527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.507560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:46.507637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.507649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:46.507654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:46.507660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:46.508206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.508221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:46.508227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:46.508832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.508852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.508858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.508866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:46.509528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:46.510095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:46.510160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:46.510383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.510411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:46.510419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.510481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:46.510489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.510523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:46.510535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:46.511058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:46.511067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:46.511119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.511124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:46.511134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.511141Z node 1 :FLAT_TX_SCHEMESHARD I ... 
94046678944, txId: 281474976710758 2025-05-05T09:57:55.996416Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-05T09:57:55.996421Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:57:55.996777Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.996801Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.996806Z node 37 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:57:55.996811Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T09:57:55.996833Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:57:55.996850Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:57:55.997093Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:57:55.997302Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:57:55.997313Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:57:55.997319Z node 37 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:57:55.997453Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T09:57:55.997480Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-05-05T09:57:55.997570Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T09:57:55.997824Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.997848Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 158913792107 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:55.997856Z node 37 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, 
step: 5000005, at schemeshard: 72057594046678944 2025-05-05T09:57:55.997881Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.997890Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:57:55.997894Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:57:55.997899Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:57:55.997902Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:57:55.997910Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:57:55.997919Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:57:55.997925Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T09:57:55.997932Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:57:55.997936Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T09:57:55.997940Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T09:57:55.997949Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:57:55.997954Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T09:57:55.997958Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-05T09:57:55.997961Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:57:55.998105Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T09:57:55.998531Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:55.998562Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:55.998596Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:57:55.998620Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.998625Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:204:2206], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T09:57:55.998630Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:204:2206], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-05-05T09:57:55.998765Z node 37 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.998778Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.998782Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:57:55.998787Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T09:57:55.998791Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:57:55.998845Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.998854Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.998858Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:57:55.998861Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:57:55.998865Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:57:55.998875Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T09:57:55.998879Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [37:125:2151] 2025-05-05T09:57:55.998932Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:57:55.998938Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:57:55.998948Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:57:55.999357Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.999676Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:57:55.999700Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T09:57:55.999710Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T09:57:55.999719Z node 37 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T09:57:55.999723Z node 37 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T09:57:55.999728Z node 37 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-05T09:57:55.999781Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:57:56.000094Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T09:57:56.000144Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T09:57:56.000152Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T09:57:56.000215Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T09:57:56.000229Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T09:57:56.000234Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [37:391:2380] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_plans[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args2-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-row] >> TExportToS3Tests::DropCopiesBeforeTransferring1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_nested_types [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3699047) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-column] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |95.3%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... 
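The Test command err blocks in this run repeatedly show "DeprecationWarning: This process ... is multi-threaded, use of fork() may lead to deadlocks in the child" from multiprocessing/popen_fork.py. One common mitigation, sketched below, is to request a "spawn" (or "forkserver") multiprocessing context instead of the default fork-based one; the log does not say whether that is acceptable for these particular fixtures, so treat this purely as an illustration, and note that the worker function is a placeholder.

# Sketch: start child processes via "spawn" so a multi-threaded parent never fork()s itself.
import multiprocessing as mp

def _worker(name: str) -> None:
    # Placeholder workload; real fixtures would launch their own helpers here.
    print(f"child process started for {name}")

if __name__ == "__main__":
    ctx = mp.get_context("spawn")  # "forkserver" also avoids forking the threaded parent directly
    proc = ctx.Process(target=_worker, args=("example",))
    proc.start()
    proc.join()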
results_accumulator.log} >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-common/simple_format/big.csv-csv_with_names-%Y-%m-%d] [GOOD] >> test_ydb_backup.py::TestRecursiveSchemeOnly::test_recursive_table_backup_from_different_places [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-json_each_row-parquet] [GOOD] >> test.py::test[window-win_over_few_partitions--Results] [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[tsv-additional_args3-column] [GOOD] >> test_select_1.py::TestSelect1::test_select_pg[v2] [GOOD] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TExportToS3Tests::DropSourceTableBeforeTransferring >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] >> test.py::test[window-win_over_few_partitions_other--Results] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-row] >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-row] [GOOD] |95.3%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-column] >> test_format_setting.py::TestS3::test_date_time_simple_posix_big_file[v2-date_time/simple_iso/big.csv-csv_with_names-ISO] [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_list] [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [GOOD] >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-json_each_row] [GOOD] >> 
TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> test_ydb_impex.py::TestImpex::test_import_file_with_bom[json-additional_args4-column] [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::UserSID >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-row] >> TExportToS3Tests::DisableAutoDropping >> TExportToS3Tests::EnableChecksumsPersistance >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-yql_syntax-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] >> TExportToS3Tests::DisableAutoDropping [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-row] [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::UserSID [GOOD] |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_1[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3713387) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/0001e5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v1/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/0001e5/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk35/testing_out_stuff/test_select_1.py.TestSelect1.test_select_1.v2/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-column] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-yql_types-pg_syntax-client0] [SKIPPED] >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-analytics] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-csv_with_names] [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] >> test.py::test_run_benchmark[generic-row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] >> test.py::test_run_determentistic[row] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args0-column] [GOOD] >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::EncryptedExport >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-row] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-row] [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] >> TExportToS3Tests::EncryptedExport [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-yql_syntax-client0] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_simple_not_null [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3698146) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_kill_pq_bill.py::TestKillPqBill::test_do_not_bill_pq[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3714900) is multi-threaded, use of fork() may lead to deadlocks in the child. 
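The "ResourceWarning: unclosed file" entries above (and again just below for the second run) point at the quoted expression meterings_loaded = sum(1 for _ in open(bill_fname)) in kikimr_runner.py, where the metering.bill handle is never closed. The standard remedy, sketched here on the assumption that the caller only needs a line count, is a with block so the handle is released deterministically:

# Sketch: count metering records without leaking the file handle.
# The bill_fname parameter mirrors the variable name quoted in the warning above.
def count_meterings(bill_fname: str) -> int:
    with open(bill_fname, encoding="utf-8") as bill:
        return sum(1 for _ in bill)  # file is closed when the with-block exits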
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:57:50] send response localhost:17681/?database=local ::1 - - [05/May/2025 09:57:50] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/0001b1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk9/testing_out_stuff/test_kill_pq_bill.py.TestKillPqBill.test_do_not_bill_pq.v1-mvp_external_ydb_endpoint0/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback |95.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] |95.3%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |95.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_sql.py::TestExecuteSqlWithPgSyntax::test_pg_syntax [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/py3test >> test_dml.py::TestDML::test_dml[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] Test command err: contrib/python/ydb/py3/ydb/types.py:59: DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version. Use timezone-aware objects to represent datetimes in UTC: datetime.datetime.fromtimestamp(timestamp, datetime.UTC). >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v1-pg_types-pg_syntax-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-csv_with_names-parquet] [GOOD] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[scripting] [GOOD] >> test_delete_read_rules_after_abort_by_system.py::TestDeleteReadRulesAfterAbortBySystem::test_delete_read_rules_after_abort_by_system [GOOD] >> test.py::test_run_benchmark[scan-row] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery.py::TestRecovery::test_program_state_recovery_error_if_no_states [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3695346) is multi-threaded, use of fork() may lead to deadlocks in the child. 
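The dml py3test block above also logs "DeprecationWarning: datetime.datetime.utcfromtimestamp() is deprecated" from the bundled ydb Python SDK, and the warning text itself names the replacement. A minimal before/after sketch follows; the timestamp value is arbitrary, and datetime.UTC needs Python 3.11+ (older versions can use datetime.timezone.utc):

# Sketch: replace the deprecated naive-UTC constructor with a timezone-aware one.
import datetime

timestamp = 1746439069  # arbitrary example value

legacy = datetime.datetime.utcfromtimestamp(timestamp)             # deprecated, naive result
aware = datetime.datetime.fromtimestamp(timestamp, datetime.UTC)   # replacement named by the warning

assert aware.replace(tzinfo=None) == legacy  # same UTC wall-clock time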
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:57:59.405150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:59.405172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:59.405178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:59.405183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:59.405196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:59.405200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:59.405210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:59.405224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:59.405311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:59.405377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:59.419656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:57:59.419677Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:59.423693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:59.424467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:59.424553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:59.432894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:59.432988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:59.433105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.435013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:59.436916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.437327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:59.437344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.437369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:59.437378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:59.437384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:59.437430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.442954Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:57:59.471212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:59.471288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.471354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:59.471400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:59.471408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.472108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.472135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:59.472181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.472190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:59.472194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:59.472198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:59.472627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.472642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:59.472664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:59.473026Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.473034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.473038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:59.473042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.473658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:59.474066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:59.474102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:59.474268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.474295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:59.474313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:59.474369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:59.474377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:59.474400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:59.474411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:59.474944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:59.474953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:59.475002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.475008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:59.475084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.475092Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:57:59.475103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:59.475107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.475112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:59.475115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.475120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:57:59.475126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.475130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:57:59.475134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:57:59.475146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:57:59.475152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:57:59.475156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:57:59.475452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:57:59.475473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
4046678944 2025-05-05T09:58:01.728788Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T09:58:01.728797Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:58:01.728831Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:01.728925Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.728938Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.728942Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:58:01.728947Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-05T09:58:01.728953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:01.729195Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.729207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.729211Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:58:01.729215Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-05-05T09:58:01.729219Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-05T09:58:01.729234Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T09:58:01.729924Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:01.729964Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T09:58:01.729971Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T09:58:01.729976Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T09:58:01.730191Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-05T09:58:01.730226Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-05-05T09:58:01.730331Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:01.730361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:01.730372Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-05-05T09:58:01.730403Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T09:58:01.730414Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T09:58:01.730418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:58:01.730424Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T09:58:01.730427Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:58:01.730438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:01.730449Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T09:58:01.730455Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-05T09:58:01.730462Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:58:01.730466Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-05T09:58:01.730470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-05-05T09:58:01.730480Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-05T09:58:01.730485Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-05T09:58:01.730489Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T09:58:01.730493Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:58:01.730641Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.730676Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.731208Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:01.731223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:01.731271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:58:01.731308Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:01.731314Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-05T09:58:01.731319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-05T09:58:01.731511Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.731548Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.731554Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:58:01.731559Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T09:58:01.731564Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:01.731706Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.731719Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.731723Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:58:01.731727Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:58:01.731732Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T09:58:01.731743Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-05T09:58:01.731748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T09:58:01.732792Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.732860Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T09:58:01.732878Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-05T09:58:01.732892Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-05-05T09:58:01.732902Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:01.732908Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-05T09:58:01.732913Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-05-05T09:58:01.733383Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:01.733403Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-05T09:58:01.733409Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1120:2999] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:57:58.693758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:58.693785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:58.693791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:58.693796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:58.693808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:58.693812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:58.693822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:58.693836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:58.693932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:58.694006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-05-05T09:57:58.706857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:57:58.706881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:58.711148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:58.711549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:58.711624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:58.713436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:58.713504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:58.713606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:58.713904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:58.715310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:58.715668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:58.715681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:58.715703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:58.715711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:58.715718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:58.715759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.717357Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:57:58.737203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:58.737291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.737367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:58.737419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:58.737430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.738163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:58.738191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:58.738244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.738254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:58.738260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:58.738265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:58.738628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.738639Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:58.738658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:58.739005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.739018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.739023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:58.739030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.739681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:58.741070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:58.741116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:58.741311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:58.741339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:58.741358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:58.741414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:58.741422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:58.741458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-05-05T09:57:58.741471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:58.741971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:58.741980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:58.742021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:58.742027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:58.742089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.742096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:57:58.742108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:58.742113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.742118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:58.742121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.742125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:57:58.742130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.742135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:57:58.742138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:57:58.742151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:57:58.742157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:57:58.742161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:57:58.742469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:57:58.742502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
025-05-05T09:58:00.823836Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-05-05T09:58:00.823862Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.823868Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2025-05-05T09:58:00.829985Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.830065Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.830077Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:00.830095Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2025-05-05T09:58:00.830143Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:00.830865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2025-05-05T09:58:00.830935Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2025-05-05T09:58:00.831150Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:00.831182Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:00.831192Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2025-05-05T09:58:00.831234Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2025-05-05T09:58:00.831288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:5102 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 633B1CBB-BC99-4F42-ACF4-3D729F811E26 amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: 
binary/octet-streamFAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 2025-05-05T09:58:00.836608Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:00.836627Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-05-05T09:58:00.836722Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:00.836731Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2025-05-05T09:58:00.836873Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.836886Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:00.837229Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-05-05T09:58:00.837251Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-05-05T09:58:00.837256Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-05-05T09:58:00.837263Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-05-05T09:58:00.837270Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-05-05T09:58:00.837290Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710765 REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:5102 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FA89BBFB-D2A9-4C4E-928A-2366C27337DC amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 2025-05-05T09:58:00.838016Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:5102 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B0DD0E15-4B93-4EBE-9B0E-47ED65225563 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 
2025-05-05T09:58:00.840627Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 815 RawX2: 17179871934 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:00.840647Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-05-05T09:58:00.840672Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 815 RawX2: 17179871934 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:00.840687Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 815 RawX2: 17179871934 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:00.840701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:00.840706Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.840711Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-05-05T09:58:00.840718Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2025-05-05T09:58:00.840761Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:00.841261Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.841340Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.841351Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-05-05T09:58:00.841364Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-05-05T09:58:00.841369Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-05T09:58:00.841374Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-05-05T09:58:00.841378Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-05T09:58:00.841383Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 
2025-05-05T09:58:00.841398Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710765 2025-05-05T09:58:00.841405Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-05-05T09:58:00.841412Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2025-05-05T09:58:00.841417Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2025-05-05T09:58:00.841445Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-05-05T09:58:00.841881Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-05-05T09:58:00.841895Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2025-05-05T09:58:00.842337Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-05-05T09:58:00.842348Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [4:847:2774] TestWaitNotification: OK eventTxId 104 >> TExportToS3Tests::CheckItemProgress >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[data] [GOOD] >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] >> test.py::test_run_determentistic[column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[csv-additional_args1-column] [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-row] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] >> test_ydb_backup.py::TestRecursiveConsistent::test_recursive_table_backup_from_different_places [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] >> TExportToS3Tests::CancelledExportEndTime >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-yql_syntax-client0] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] >> TExportToS3Tests::CheckItemProgress [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_json[scan] [GOOD] >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-row] [GOOD] >> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_list] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] >> TExportToS3Tests::Checksums >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-column] >> 
test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[scripting] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[data] [GOOD]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-yql_types-pg_syntax-client0] [SKIPPED]
>> TExportToS3Tests::Checksums [GOOD]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args2-column] [GOOD]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-row]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan]
>> TExportToS3Tests::ChecksumsWithCompression
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-row] [GOOD]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-column]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[tsv-additional_args3-column] [GOOD]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-row]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-row] [GOOD]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column]
>> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_csv[stream] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_csv[scan] [GOOD]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-yql_syntax-client0] [GOOD]
>> TExportToS3Tests::ChecksumsWithCompression [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data]
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[scripting] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[data] [GOOD]
>> TExportToS3Tests::Changefeeds
>> test_format_setting.py::TestS3::test_precompute_with_pg_binding[v2-pg_types-pg_syntax-client0] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_simple_tsv[stream] [GOOD]
>> TExportToS3Tests::Changefeeds [GOOD]
>> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_simple_tsv[scan] [GOOD]
>> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v1-timestamp/completeness_iso/test.csv-csv_with_names] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[scripting] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data]
------- [TM] {default-linux-x86_64, relwithdebinfo}
ydb/tests/fq/yds/py3test >> test_select_timings.py::TestSelectTimings::test_select_timings[v1-streaming-aborted] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3702154) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:48.926511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:48.926539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:48.926545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:48.926550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:48.926556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:48.926560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:48.926569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:48.926584Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:48.926718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:48.926800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:48.939901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:48.939927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:48.940022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:48.941868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:48.941909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:48.941933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:48.942874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:48.942939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:48.943069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:48.943127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:48.943644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:48.943909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:48.943930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:48.943967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:48.943975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:48.943997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:48.944026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:48.945535Z node 
1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:48.964140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:48.964216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.964275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:48.964343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:48.964354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.964993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:48.965020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:48.965086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.965096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:48.965101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:48.965106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:48.965568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.965593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:48.965598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:48.965930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.965939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.965942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:48.965960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:48.966419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:48.966929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:48.966979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:48.967180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:48.967208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:48.967215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:48.967275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:48.967283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:48.967324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:48.967336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:48.967780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:48.967789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:48.967838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:48.967843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:48.967853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.967860Z node 1 :FLAT_TX_SCHEMESHARD I ... 
94046678944, txId: 281474976710758 2025-05-05T09:58:00.463551Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-05T09:58:00.463556Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:00.463606Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.463614Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.463618Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:00.463621Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T09:58:00.463625Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:00.463632Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:58:00.464264Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:00.467065Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:58:00.467084Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:58:00.467091Z node 41 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:58:00.467133Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T09:58:00.467165Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T09:58:00.467329Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:00.467357Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 176093661292 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:00.467366Z node 41 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-05T09:58:00.467401Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at 
schemeshard: 72057594046678944 2025-05-05T09:58:00.467428Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:00.467433Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:00.467440Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:00.467444Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:00.467458Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:00.467470Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:00.467477Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T09:58:00.467485Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:00.467490Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T09:58:00.467495Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T09:58:00.467506Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:00.467513Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T09:58:00.467518Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-05T09:58:00.467522Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:58:00.467671Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.467691Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.468267Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:00.468289Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:00.468350Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:58:00.468378Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:00.468384Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T09:58:00.468390Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T09:58:00.468538Z node 41 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.468551Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.468556Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:00.468561Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T09:58:00.468567Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:00.468695Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.468705Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.468709Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:00.468713Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:58:00.468717Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:00.468729Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T09:58:00.468735Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [41:125:2151] 2025-05-05T09:58:00.468788Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:00.468794Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:58:00.468805Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:00.469575Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.469686Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:00.469706Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T09:58:00.469718Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T09:58:00.469730Z node 41 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T09:58:00.469735Z node 41 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T09:58:00.469741Z node 41 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-05T09:58:00.469802Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:00.470151Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T09:58:00.470206Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T09:58:00.470227Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T09:58:00.470303Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T09:58:00.470321Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T09:58:00.470326Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:400:2389] TestWaitNotification: OK eventTxId 1003 >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] >> test_format_setting.py::TestS3::test_timestamp_completeness_iso[v2-timestamp/completeness_iso/test.csv-csv_with_names] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_raw[stream] [GOOD] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TExportToS3Tests::RebootDuringCompletion >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-json_each_row] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_raw[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[scripting] [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v1-date_time/completeness_iso/test.csv-csv_with_names] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v2-date_time/completeness_iso/test.csv-csv_with_names] >> TExportToS3Tests::SchemaMapping >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[data] 
[GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_json[stream] [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> test_format_setting.py::TestS3::test_date_time_completeness_iso[v2-date_time/completeness_iso/test.csv-csv_with_names] [GOOD] >> TExportToS3Tests::SchemaMapping [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_select_1.py::TestSelect1::test_select_pg[v2] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3721182) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-row] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_public_metrics.py::TestPublicMetrics::test_select_unlimited[v1-STATS_MODE_FULL] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3720938) is multi-threaded, use of fork() may lead to deadlocks in the child. 
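The DeprecationWarning above comes from CPython's multiprocessing module: calling fork() in a process that already runs several threads can leave the child deadlocked. A minimal, self-contained sketch of the usual mitigation, creating workers with the spawn start method instead of fork, is shown below; the run_case function and the case names are hypothetical illustrations and not part of these test suites.

import multiprocessing as mp

def run_case(case_name: str) -> str:
    # Hypothetical stand-in for executing one parametrized test case.
    return f"{case_name}: GOOD"

if __name__ == "__main__":
    # "spawn" launches each worker through a fresh interpreter instead of fork(),
    # so the already multi-threaded parent process is never forked directly.
    ctx = mp.get_context("spawn")
    with ctx.Pool(processes=4) as pool:
        print(pool.map(run_case, ["case_a", "case_b"]))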
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:58:10.878413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:58:10.878436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:10.878440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:58:10.878444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:58:10.878453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:58:10.878456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:58:10.878463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:10.878488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:58:10.878569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:58:10.878634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:58:10.891346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:58:10.891372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:58:10.895236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:58:10.895513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:58:10.895563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:58:10.896737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:58:10.896783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:58:10.896883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:10.897112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:58:10.897768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:10.898003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:10.898015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:10.898032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:58:10.898038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:10.898044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:58:10.898073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.899347Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:58:10.916601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:10.916677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.916761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:58:10.916820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:58:10.916833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.917651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:10.917684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:58:10.917740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.917751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:58:10.917756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:58:10.917762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:58:10.918289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.918303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:10.918309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:58:10.918785Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.918799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.918806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:10.918812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:58:10.919498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:10.919989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:58:10.920028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:58:10.920204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:10.920240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:10.920260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:10.920320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:58:10.920335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:10.920367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:58:10.920379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:58:10.920975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:10.920984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:10.921028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:10.921034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:58:10.921096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:10.921105Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:58:10.921116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:10.921120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:10.921125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:10.921128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:10.921133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:58:10.921138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:10.921143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:58:10.921147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:58:10.921158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:10.921164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:58:10.921168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:58:10.921498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:58:10.921514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
7594046678944 2025-05-05T09:58:13.336707Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:13.336717Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:13.336748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:13.336865Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.336877Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.336882Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:13.336887Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T09:58:13.336892Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:13.336977Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.336990Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.336994Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:13.336999Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-05-05T09:58:13.337003Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-05-05T09:58:13.337011Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:13.337743Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:13.337813Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:13.337819Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:13.337823Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:58:13.338076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-05-05T09:58:13.338144Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338164Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338170Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338204Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:13.338207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:13.338211Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:13.338213Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:13.338220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:13.338229Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-05-05T09:58:13.338232Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:58:13.338239Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:13.338242Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:58:13.338245Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:58:13.338251Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-05-05T09:58:13.338255Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:58:13.338258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-05-05T09:58:13.338261Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-05-05T09:58:13.338378Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.338388Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.338702Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338709Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:13.338737Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-05-05T09:58:13.338754Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:13.338757Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:58:13.338760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:58:13.338877Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.338884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.338887Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:13.338890Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-05T09:58:13.338894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:13.339014Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.339022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.339027Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:13.339029Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-05-05T09:58:13.339032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-05-05T09:58:13.339040Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:13.339043Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T09:58:13.339413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.339611Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:13.339626Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:13.339633Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:13.339639Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:13.339643Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:13.339646Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-05-05T09:58:13.339899Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:13.339912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-05-05T09:58:13.339917Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1383:3173] TestWaitNotification: OK eventTxId 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_determentistic[column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> test_format_setting.py::TestS3::test_date_null[v1-date_null/as_default/test.csv] >> TExportToS3Tests::SchemaMappingEncryption >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_json[scan] [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[scripting] [GOOD] >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> test_format_setting.py::TestS3::test_date_null[v1-date_null/as_default/test.csv] [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[data] [GOOD] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/parse_error/test.csv] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_csv[stream] [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_csv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[data] [GOOD] >> test.py::test_run_benchmark[scan-column] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] >> test_format_setting.py::TestS3::test_date_null[v1-date_null/parse_error/test.csv] [GOOD] >> test_big_state.py::TestBigState::test_gt_8mb[v1] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [GOOD] >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-csv_with_names] [GOOD] >> test.py::test_run_benchmark[generic-column] [GOOD] |95.5%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_restart_query.py::TestRestartQuery::test_restart_runtime_errors[v1-mvp_external_ydb_endpoint0-streaming] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3721664) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:57:50] send response localhost:21296/?database=local ::1 - - [05/May/2025 09:57:50] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:58:10] send response localhost:21296/?database=local ::1 - - [05/May/2025 09:58:10] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_metrics_cleanup.py::TestCleanup::test_keep[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3595450) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:125:2058] recipient: [1:109:2141] 2025-05-05T09:57:59.149140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:59.149168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:59.149174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:59.149179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:59.149192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:59.149196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:59.149206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:59.149219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:59.149309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:59.149383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:59.162976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-05-05T09:57:59.163005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:59.165680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:59.165715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:59.165756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:59.168996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:59.169070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:59.169174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.169242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:59.171460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.172019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:59.172035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.172119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:59.172129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:59.172136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:59.172159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.173653Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T09:57:59.192874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:59.192958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.193038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:59.193092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:59.193104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.193880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.193939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-05-05T09:57:59.193991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.194000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:59.194005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:59.194010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:59.194474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.194485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:59.194489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:59.194843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.194852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.194858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:59.194865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.195505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:59.195855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:59.195891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:59.196060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.196084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:59.196101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:59.196161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:59.196168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:59.196198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:59.196210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:59.196616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:59.196626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:59.196669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.196674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:59.196746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.196753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:57:59.196764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:59.196768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.196773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:59.196776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.196780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:57:59.196785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:59.196790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:57:59.196794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:57:59.196804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:57:59.196810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:57:59.196814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:57:59.197114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:57:59.197129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
4] TDone opId# 281474976710758:0 ProgressState 2025-05-05T09:58:12.801944Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:12.801949Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:12.801954Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:12.801957Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:12.801963Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2025-05-05T09:58:12.801980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710758 2025-05-05T09:58:12.801987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:12.801994Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T09:58:12.801999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T09:58:12.802035Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T09:58:12.802041Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-05-05T09:58:12.802488Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T09:58:12.802505Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T09:58:12.803007Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-05-05T09:58:12.814521Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-05T09:58:12.814538Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-05T09:58:12.815595Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:29078" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:12.815688Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 
281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.815724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:12.815838Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:58:12.815847Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.816150Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-05-05T09:58:12.816159Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-05-05T09:58:12.816656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:12.816703Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-05-05T09:58:12.816759Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-05-05T09:58:12.816767Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-05-05T09:58:12.816836Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.816845Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-05-05T09:58:12.816852Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-05-05T09:58:12.816857Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-05-05T09:58:12.817730Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-05-05T09:58:12.817747Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-05-05T09:58:12.817889Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.817897Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.817928Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-05-05T09:58:12.818597Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-05-05T09:58:12.818631Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.818636Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:12.818672Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-05-05T09:58:12.818768Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-05-05T09:58:12.818788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-05-05T09:58:12.819007Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-05-05T09:58:12.819043Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-05-05T09:58:12.819579Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-05-05T09:58:12.819969Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:58:12.819980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:556:2515] TestWaitNotification: OK eventTxId 102
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_stdin_par_tsv[scan] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data]
>> test_format_setting.py::TestS3::test_date_null[v2-date_null/as_default/test.csv]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_stdin_par_tsv[stream] [GOOD]
>> test_format_setting.py::TestS3::test_date_null[v2-date_null/as_default/test.csv] [GOOD]
>> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet]
>> test_format_setting.py::TestS3::test_date_null[v2-date_null/parse_error/test.csv]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting]
>> test_format_setting.py::TestS3::test_date_null[v2-date_null/parse_error/test.csv] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[scripting] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[data] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_mix_json_and_binary[stream] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting]
>> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/as_default/test.csv]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_mix_json_and_binary[scan] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[scripting] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[data] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_json[stream] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[scripting] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_json[scan] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[data] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_csv[stream] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_csv[scan] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[scripting] [GOOD]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[data] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_different_sources_tsv[stream] [GOOD]
>> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0]
>> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting]
>> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:58:15.360378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:58:15.360403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.360409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:58:15.360415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:58:15.360429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:58:15.360433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:58:15.360443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s,
InflightLimit# 10 2025-05-05T09:58:15.360457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:58:15.360546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:58:15.360616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:58:15.370986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:58:15.371005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:58:15.374087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:58:15.374371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:58:15.374416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:58:15.375931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:58:15.375988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:58:15.376085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.376336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:58:15.377229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.377512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.377521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.377546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:58:15.377551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.377555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:58:15.377583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.378848Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:58:15.395576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:15.395657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.395734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:58:15.395788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-05-05T09:58:15.395798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.396576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.396599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:58:15.396650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.396660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:58:15.396665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:58:15.396670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:58:15.397038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.397048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:15.397052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:58:15.397339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.397346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.397349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.397354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.397800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:15.398170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:58:15.398206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:58:15.398358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.398378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:15.398392Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.398439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:58:15.398452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.398481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:58:15.398491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:58:15.398875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.398880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.398933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.398940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:58:15.399007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.399013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:58:15.399022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.399024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.399028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.399030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.399033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:58:15.399036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.399039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:58:15.399042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:58:15.399050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:15.399055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:58:15.399058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:58:15.399296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:58:15.399310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
74Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:17.093078Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T09:58:17.093081Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:17.093208Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.093215Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.093217Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:17.093220Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T09:58:17.093223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:17.093229Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:58:17.093642Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:17.093700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:58:17.093704Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:58:17.093709Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:58:17.093850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T09:58:17.093873Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T09:58:17.093944Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:17.093963Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:17.093969Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-05-05T09:58:17.093994Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T09:58:17.094001Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:17.094005Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:17.094010Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:17.094013Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:17.094021Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:17.094029Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:17.094035Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T09:58:17.094041Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:17.094045Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T09:58:17.094048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T09:58:17.094059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:17.094065Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T09:58:17.094069Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-05T09:58:17.094072Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:58:17.094227Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.094245Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.094559Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:17.094567Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:17.094616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:58:17.094635Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:17.094638Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T09:58:17.094642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 
2025-05-05T09:58:17.094775Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.094784Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.094787Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:17.094791Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T09:58:17.094793Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:17.094907Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.094936Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.094940Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:17.094944Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:58:17.094947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:17.094956Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T09:58:17.094962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T09:58:17.095021Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:17.095028Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:58:17.095039Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:17.095527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.095809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:17.095830Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T09:58:17.095842Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 
2025-05-05T09:58:17.095851Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:17.095855Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T09:58:17.095860Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-05-05T09:58:17.095928Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:17.096300Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-05-05T09:58:17.096358Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-05-05T09:58:17.096364Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-05-05T09:58:17.096424Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-05-05T09:58:17.096438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-05T09:58:17.096442Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:547:2506] TestWaitNotification: OK eventTxId 103
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:53.592048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:53.592085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:53.592092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:53.592097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:53.592102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:53.592106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-05-05T09:57:53.592115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:53.592128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:53.592227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:53.592299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:53.605242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:53.605267Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:53.605360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:53.611797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:53.611868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:53.611913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:53.613590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:53.613663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:53.613794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:53.613853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:53.614364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:53.614723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:53.614738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:53.614784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:53.614792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:53.614798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:53.614825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for 
TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:53.616872Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:53.639370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:53.639444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.639493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:53.639558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:53.639568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.640177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:53.640199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:53.640250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.640258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:53.640263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:53.640268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:53.640670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.640683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:53.640688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:53.641199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.641216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.641222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:53.641229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:53.642078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 
Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:53.642636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:53.642684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:53.642887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:53.642932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:53.642940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:53.642993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:53.643000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:53.643024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:53.643035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:53.643587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:53.643596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:53.643627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:53.643632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:53.643641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:53.643648Z node 1 :FLAT_TX_SCHEMESHARD I ... 
94046678944, txId: 281474976710758 2025-05-05T09:58:03.802945Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-05-05T09:58:03.802951Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:03.803101Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.803114Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.803117Z node 40 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:03.803119Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T09:58:03.803122Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:03.803131Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:58:03.803458Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:03.803610Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:58:03.803615Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-05-05T09:58:03.803619Z node 40 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-05-05T09:58:03.803698Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-05-05T09:58:03.803719Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-05-05T09:58:03.803803Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.803877Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:03.803898Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 171798693988 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:03.803922Z node 40 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, 
step: 5000005, at schemeshard: 72057594046678944 2025-05-05T09:58:03.803948Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-05-05T09:58:03.803959Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:03.803964Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:03.803967Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-05-05T09:58:03.803969Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:03.803976Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:03.803983Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:03.803986Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-05-05T09:58:03.803991Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-05-05T09:58:03.803995Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-05-05T09:58:03.803998Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-05-05T09:58:03.804006Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:03.804009Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-05-05T09:58:03.804012Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-05-05T09:58:03.804014Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:58:03.804267Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-05-05T09:58:03.804628Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:03.804634Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:03.804660Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:58:03.804677Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:03.804681Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-05-05T09:58:03.804686Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:205:2207], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-05-05T09:58:03.804759Z node 40 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.804767Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.804770Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:03.804773Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T09:58:03.804776Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:03.804832Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.804841Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.804844Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-05-05T09:58:03.804848Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:58:03.804851Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:03.804861Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-05-05T09:58:03.804865Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [40:126:2151] 2025-05-05T09:58:03.804899Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:03.804902Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:58:03.804909Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:03.805239Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.805581Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-05-05T09:58:03.805610Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-05-05T09:58:03.805629Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-05-05T09:58:03.805638Z node 40 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T09:58:03.805643Z node 40 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-05-05T09:58:03.805651Z node 40 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-05-05T09:58:03.805744Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:03.806105Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T09:58:03.806157Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T09:58:03.806164Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T09:58:03.806230Z node 40 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T09:58:03.806244Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T09:58:03.806249Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:393:2382] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:57:57.451173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:57.451198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:57.451203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:57.451208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:57.451221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:57.451224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:57.451234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:57.451247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:57.451336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:57.451406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:57.464642Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:57:57.464665Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:57.468506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:57.468857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:57.468907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:57.470159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:57.470206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:57.470299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:57.470589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:57.471340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:57.471618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:57.471627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:57.471646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:57.471653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:57.471659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:57.471693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.473057Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:57:57.491703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:57.491789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.491865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:57.491920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:57.491931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.494110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:57.494144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:57.494201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.494214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:57.494218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:57.494224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:57.494813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.494829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:57.494834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:57.495286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.495297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.495303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:57.495309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:57.495987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:57.496431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:57.496473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:57.496653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:57.496681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:57.496698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:57.496749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:57.496756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:57.496791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:57.496802Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:57.497251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:57.497259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:57.497304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:57.497309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:57.497377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:57.497386Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:57:57.497397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:57.497401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:57.497406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:57.497409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:57.497413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:57:57.497418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:57.497423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:57:57.497427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:57:57.497439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:57:57.497445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:57:57.497449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:57:57.497762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:57:57.497776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
57594046678944 2025-05-05T09:57:59.512533Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.512542Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:57:59.512569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:59.512764Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.512779Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.512784Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:57:59.512789Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T09:57:59.512794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:57:59.513130Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.513144Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.513148Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:57:59.513165Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-05T09:57:59.513170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T09:57:59.513182Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:57:59.513570Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:57:59.513625Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:57:59.513631Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:57:59.513637Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:57:59.513675Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:57:59.513703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-05-05T09:57:59.513802Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:59.513824Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:59.513831Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-05T09:57:59.513861Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:57:59.513871Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:57:59.513875Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:57:59.513881Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:57:59.513884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:57:59.513893Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:57:59.513902Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T09:57:59.513908Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:57:59.513914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:57:59.513918Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:57:59.513922Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:57:59.513932Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T09:57:59.513937Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:57:59.513941Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-05T09:57:59.513945Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-05T09:57:59.514363Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.514384Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.514710Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:59.514721Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:59.514762Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-05T09:57:59.514785Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:59.514790Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:57:59.514796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:57:59.514993Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.515005Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.515010Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:57:59.515014Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T09:57:59.515019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:57:59.515157Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.515167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.515171Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:57:59.515174Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-05T09:57:59.515178Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T09:57:59.515188Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:57:59.515193Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-05-05T09:57:59.515886Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.515940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:57:59.515955Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:57:59.515966Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:57:59.515974Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:57:59.515979Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:57:59.515984Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-05T09:57:59.516341Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:57:59.516358Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:57:59.516364Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:614:2571] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:56.540261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:56.540291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:56.540311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:56.540316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:56.540322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:56.540326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:56.540336Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:56.540349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:56.540444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:56.540522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:56.553864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:56.553887Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:56.553973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:56.558469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:56.558536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:56.558577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:56.560252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:56.560318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:56.560447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:56.560487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:56.561996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:56.562257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:56.562269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:56.562326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:56.562333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:56.562339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:56.562360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:56.564095Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:56.587444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:56.587516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.587570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:56.587639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:56.587652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.588416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:56.588454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:56.588516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.588527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:56.588532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:56.588536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:56.589037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.589052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:56.589058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:56.589677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.589691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.589697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:56.589713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:56.590358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:56.590934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:56.590977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:56.591174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:56.591207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:56.591215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:56.591274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:56.591281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:56.591307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:56.591318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:56.591822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:56.591832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:56.591874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:56.591879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:56.591889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.591896Z node 1 :FLAT_TX_SCHEMESHARD I ... 
8944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.429636Z node 25 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:03.429641Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T09:58:03.429646Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:03.429869Z node 25 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.429912Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.429917Z node 25 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:03.429921Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:58:03.429929Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T09:58:03.429943Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-05T09:58:03.430579Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-05T09:58:03.430613Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000007 2025-05-05T09:58:03.430763Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:03.430785Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 107374184555 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:03.430793Z node 25 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-05T09:58:03.430822Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-05T09:58:03.430832Z node 25 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T09:58:03.430836Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T09:58:03.430842Z node 25 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T09:58:03.430845Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 
2025-05-05T09:58:03.430854Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:03.430864Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T09:58:03.430870Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-05T09:58:03.430877Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T09:58:03.430882Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-05-05T09:58:03.430886Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-05-05T09:58:03.430896Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T09:58:03.430902Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-05T09:58:03.430906Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-05-05T09:58:03.430954Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-05T09:58:03.431055Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.431636Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.431945Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:03.431959Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:03.432004Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:58:03.432047Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:03.432053Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [25:204:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-05T09:58:03.432057Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [25:204:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-05-05T09:58:03.432272Z node 25 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.432287Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.432291Z node 25 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:03.432296Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-05-05T09:58:03.432302Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:03.432467Z node 25 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.432481Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.432486Z node 25 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:03.432490Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-05T09:58:03.432493Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T09:58:03.432506Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-05T09:58:03.432559Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:03.432565Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-05T09:58:03.432577Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:03.433072Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.433467Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:03.433505Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-05-05T09:58:03.433605Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-05T09:58:03.433642Z node 25 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 44us result status StatusSuccess 2025-05-05T09:58:03.433741Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "export-1003" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_different_sources_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_json[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_json[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_framing_newline_delimited_raw[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_tsv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_raw[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_framing_newline_delimited_raw[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_raw[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_json[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_json[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_csv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_full_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_full_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_raw[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_raw[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_json[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_json[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_csv[stream] [GOOD] >> 
test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_batching_adaptive_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_json[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_batching_adaptive_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_csv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_json[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_ignore_excess_parameters_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_csv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[scripting] [GOOD] >> 
test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_bad_header_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_ignore_excess_parameters_tsv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_csv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_csv[stream] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_columns_no_header_tsv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[data] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[scripting] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_csv[scan] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[data] [GOOD] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_bad_header_tsv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[scripting] [GOOD] >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_columns_no_header_tsv[stream] 
[GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_csv[stream] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[scripting] [GOOD] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] >> test_ydb_scripting.py::TestExecuteScriptWithParamsFromStdin::test_skip_rows_tsv[stream] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_read_rules_deletion.py::TestReadRulesDeletion::test_delete_read_rules[v1-with_recovery] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3716722) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:57:58.910986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:58.911012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:58.911017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:58.911022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:58.911032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:58.911036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:58.911045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:58.911054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:58.911127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2025-05-05T09:57:58.911185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:58.923961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:57:58.923982Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:58.928083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:58.928409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:58.928455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:58.930064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:58.930121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:58.930219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:58.930500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:58.931600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:58.931910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:58.931923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:58.931941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:58.931949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:58.931956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:58.931989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.934272Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:57:58.951642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:58.951720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.951792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:58.951848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:58.951862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.952797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-05-05T09:57:58.952827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:58.952887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.952898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:58.952904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:58.952909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:58.953465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.953478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:58.953484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:58.953987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.954007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.954013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:58.954020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.954781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:58.955383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:58.955426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:58.955601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:58.955631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:58.955649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:58.955708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:58.955716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:58.955750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:58.955763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:58.956307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:58.956338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:58.956381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:58.956386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:58.956437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:58.956444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:57:58.956453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:58.956456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.956460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:57:58.956462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.956465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:57:58.956469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:57:58.956472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:57:58.956475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:57:58.956485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:57:58.956489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:57:58.956492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:57:58.956811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:57:58.956843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
X_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-05-05T09:58:00.329581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.329616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.329623Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:00.329636Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:00.329668Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:00.330021Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-05-05T09:58:00.330056Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-05-05T09:58:00.330145Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:00.330167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:00.330175Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-05-05T09:58:00.330200Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-05-05T09:58:00.330227Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-05-05T09:58:00.338062Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:00.338078Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:58:00.338159Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:00.338164Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-05-05T09:58:00.338307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.338320Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-05-05T09:58:00.338480Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T09:58:00.338489Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T09:58:00.338493Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-05T09:58:00.338497Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-05-05T09:58:00.338502Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-05-05T09:58:00.338520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:7932 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2EB594EA-A4E5-4F39-A979-070C155032E8 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-05-05T09:58:00.339199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:7932 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: ECAAAC04-4EBF-46E6-AE44-834001A91BE6 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:7932 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 54EC7E00-E1B7-41D2-96AB-D4526192450A amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:7932 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 107693A1-3671-48E7-84E2-619AF52149EB amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-05-05T09:58:00.346101Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 17179871605 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:00.346117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-05T09:58:00.346138Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 17179871605 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:00.346149Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 452 RawX2: 17179871605 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:00.346162Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:00.346165Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.346168Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T09:58:00.346173Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T09:58:00.346209Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:00.346543Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.346613Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:00.346622Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T09:58:00.346632Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:58:00.346636Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:00.346640Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:58:00.346642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:00.346659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-05T09:58:00.346671Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710759 2025-05-05T09:58:00.346676Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:00.346680Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T09:58:00.346683Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T09:58:00.346710Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T09:58:00.347096Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T09:58:00.347109Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T09:58:00.347416Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-05-05T09:58:00.347425Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:481:2442] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:49.157546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:49.157597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:49.157604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:49.157609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:49.157614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:49.157619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:49.157627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-05-05T09:57:49.157640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:49.157734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:49.157809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:49.171087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:49.171114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:49.171202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:49.174107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:49.174150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:49.174201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:49.175610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:49.175671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:49.175803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:49.175852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:49.176403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:49.176662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:49.176674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:49.176720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:49.176728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:49.176735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:49.176763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: 
[1:213:2214] 2025-05-05T09:57:49.178558Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:49.199133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:49.199211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.199271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:49.199336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:49.199349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.200232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:49.200264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:49.200336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.200348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:49.200352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:49.200358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:49.201001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.201021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:49.201027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:49.201572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.201586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.201592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:49.201599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:49.202289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:49.202972Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:49.203013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:49.203223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:49.203254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:49.203262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:49.203322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:49.203331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:49.203362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:49.203374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:49.204273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:49.204286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:49.204337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:49.204343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:49.204356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:49.204364Z node 1 :FLAT_TX_SCHEMESHARD I ... 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.517632Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.517636Z node 56 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:07.517644Z node 56 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T09:58:07.517649Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:07.517839Z node 56 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.517852Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.517855Z node 56 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:07.517859Z node 56 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-05-05T09:58:07.517863Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T09:58:07.517875Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-05-05T09:58:07.518560Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-05-05T09:58:07.518584Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000009 2025-05-05T09:58:07.518722Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.518890Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:07.518946Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 240518170731 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:07.518953Z node 56 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T09:58:07.518980Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-05-05T09:58:07.518987Z node 56 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T09:58:07.518992Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T09:58:07.518996Z node 56 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-05-05T09:58:07.518999Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T09:58:07.519007Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:07.519016Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T09:58:07.519022Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-05-05T09:58:07.519028Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-05-05T09:58:07.519035Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-05-05T09:58:07.519038Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-05-05T09:58:07.519049Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-05-05T09:58:07.519054Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-05-05T09:58:07.519058Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-05T09:58:07.519061Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-05-05T09:58:07.519191Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.519588Z node 56 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:07.519598Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:07.519621Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-05-05T09:58:07.519639Z node 56 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:07.519652Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [56:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-05-05T09:58:07.519655Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [56:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-05-05T09:58:07.519764Z node 56 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.519772Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1004 
2025-05-05T09:58:07.519775Z node 56 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:07.519778Z node 56 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-05T09:58:07.519781Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:07.519876Z node 56 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.519884Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.519888Z node 56 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-05-05T09:58:07.519891Z node 56 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-05-05T09:58:07.519895Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-05-05T09:58:07.519909Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-05-05T09:58:07.519955Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:07.519961Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-05-05T09:58:07.519970Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:07.520927Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.521026Z node 56 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-05-05T09:58:07.521042Z node 56 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-05-05T09:58:07.521124Z node 56 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-05T09:58:07.521152Z node 56 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 34us result status StatusSuccess 2025-05-05T09:58:07.521246Z node 56 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [GOOD] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[scan-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |95.6%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |95.6%| [TA] $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/dml/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/py3test >> test.py::test_run_benchmark[generic-column] [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> TExportToS3Tests::AuditCancelledExport [GOOD] >> TExportToS3Tests::AutoDropping >> test_statistics.py::TestS3::test_convert[v1-client0-parquet-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/as_default/test.csv] [GOOD] >> TExportToS3Tests::AutoDropping [GOOD] |95.7%| [TA] $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/parse_error/test.csv] |95.7%| [TA] {RESULT} $(B)/ydb/tests/functional/clickbench/test-results/py3test/{meta.json ... results_accumulator.log} >> TExportToS3Tests::RebootDuringAbortion >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:125:2058] recipient: [1:109:2141] 2025-05-05T09:58:15.453959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:58:15.453982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.453988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:58:15.453993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:58:15.454003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:58:15.454007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:58:15.454015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.454027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:58:15.454110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:58:15.454178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:58:15.465655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:58:15.465676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:58:15.467927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:58:15.467956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:58:15.467983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:58:15.470084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-05-05T09:58:15.470139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:58:15.470210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.470256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:58:15.471161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.471618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.471633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.471698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:58:15.471707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.471714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:58:15.471732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.472891Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:239:2058] recipient: [1:15:2062] 2025-05-05T09:58:15.492442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:15.492503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.492558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:58:15.492605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:58:15.492616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:58:15.493292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:58:15.493305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:58:15.493310Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:58:15.493686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:58:15.494086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.494097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.494102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.494108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.494753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:15.495166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:58:15.495199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:58:15.495361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.495388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:15.495404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.495456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:58:15.495463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.495489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:58:15.495500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:58:15.496050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.496063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 
1] 2025-05-05T09:58:15.496100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.496106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:58:15.496172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.496179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:58:15.496192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.496196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.496201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.496204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.496208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:58:15.496214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.496218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:58:15.496221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:58:15.496235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:15.496241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:58:15.496245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:58:15.496608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:58:15.496624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
44 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:21.785569Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:21.785595Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:21.785599Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-05T09:58:21.785603Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T09:58:21.785612Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:21.785617Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:127:2152] 2025-05-05T09:58:21.786205Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:21.786276Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:21.786293Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:21.786303Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:21.786311Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:21.786315Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:21.786320Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-05T09:58:21.786773Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:21.786801Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:58:21.786808Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:476:2437] TestWaitNotification: OK eventTxId 102 2025-05-05T09:58:21.786997Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-05-05T09:58:21.787025Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 38us result status StatusSuccess 2025-05-05T09:58:21.787127Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 1 2025-05-05T09:58:21.787174Z node 5 :EXPORT DEBUG: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-05-05T09:58:21.787687Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:21.787703Z node 5 :EXPORT DEBUG: TExport::TTxProgress: Resume: id# 102 2025-05-05T09:58:21.787730Z node 5 :EXPORT INFO: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-05-05T09:58:21.787739Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:21.787756Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-05-05T09:58:21.787761Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:21.787765Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2025-05-05T09:58:21.787771Z node 5 :EXPORT INFO: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-05-05T09:58:21.787784Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:21.788404Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:21.788435Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-05-05T09:58:21.788460Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710762:1, propose 
status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, at schemeshard: 72057594046678944 2025-05-05T09:58:21.789008Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:21.789048Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37, operation: DROP DIRECTORY, path: /MyRoot/export-102 2025-05-05T09:58:21.789072Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-05-05T09:58:21.789081Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-05-05T09:58:21.789089Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:21.789093Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-05-05T09:58:21.789100Z node 5 :EXPORT TRACE: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:37" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-05-05T09:58:21.789121Z node 5 :EXPORT INFO: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-05-05T09:58:21.789441Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:21.789464Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 
2025-05-05T09:58:21.789481Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:21.789490Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:21.789494Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:21.789498Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:21.789502Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-05-05T09:58:21.789956Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2025-05-05T09:58:21.790017Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-05-05T09:58:21.790025Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-05-05T09:58:21.790104Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-05-05T09:58:21.790121Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:58:21.790139Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:693:2650] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> test.py::test[in-in_tablesource_to_equijoin--Results] >> test.py::test[tpch-q17-default.txt-ForceBlocks] >> test.py::test[join-no_empty_join_for_dyn--ForceBlocks] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Results] [SKIPPED] >> test.py::test[join-star_join_mirror-off-ForceBlocks] >> test.py::test[join-full_trivial_udf_call--ForceBlocks] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] >> TExportToS3Tests::ExportIndexTablePartitioningSettings |95.7%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v1-date_null/parse_error/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/as_default/test.csv] >> test.py::test[pg-tpcds-q26-default.txt-Results] >> test.py::test[optimizers-length_over_merge--ForceBlocks] >> test.py::test[join-join_and_distinct_key-off-ForceBlocks] >> test.py::test[action-dep_world_quote_code-default.txt-ForceBlocks] >> test.py::test[key_filter-split_input_with_key_filter2--Results] [SKIPPED] >> test.py::test[key_filter-utf8_with_legacy--Results] >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> test.py::test[coalesce-coalesce_few_opt--ForceBlocks] >> test.py::test[produce-reduce_with_assume--Results] [SKIPPED] >> test.py::test[ql_filter-integer_bounds--Results] >> test.py::test[hor_join-max_outtables--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-max_outtables--Results] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse--Results] [SKIPPED] >> test.py::test[in-in_enum_single1-default.txt-ForceBlocks] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] >> test.py::test[sampling-mapjoin_left_sample-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_recovery_match_recognize.py::TestRecoveryMatchRecognize::test_match_recognize[v1-kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3710290) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] >> test.py::test[aggregate-group_by_ru_join--Results] >> test.py::test[pg-tpcds-q90-default.txt-ForceBlocks] >> test.py::test[join-opt_on_opt_side--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input--ForceBlocks] >> test.py::test[blocks-combine_all_minmax_nested--ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_list] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_start_new_query [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3718112) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] >> test.py::test[join-mergejoin_semi_to_inner--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:58:22.084009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:58:22.084035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:22.084041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:58:22.084046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:58:22.084058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:58:22.084062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:58:22.084072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:22.084086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:58:22.084168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:58:22.084241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:58:22.099518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:58:22.099540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:58:22.103657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:58:22.104130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:58:22.104226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:58:22.106426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:58:22.106511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:58:22.106668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:22.107036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:58:22.108137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:22.108515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:22.108528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:22.108549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:58:22.108557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:22.108564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:58:22.108604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.111470Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:58:22.131955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:22.132051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.132130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:58:22.132180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:58:22.132190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.133185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:22.133217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:58:22.133279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.133291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:58:22.133297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:58:22.133303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:58:22.133939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.133952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-05-05T09:58:22.133958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:58:22.134352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.134361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.134365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:22.134370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:58:22.135078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:22.135583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:58:22.135621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:58:22.135799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:22.135821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:22.135836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:22.135904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:58:22.135919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:22.135955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:58:22.135965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:58:22.136363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:22.136369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:22.136410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:22.136417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:58:22.136491Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:22.136497Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:58:22.136509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:22.136512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:22.136516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:22.136518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:22.136521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:58:22.136525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:22.136528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:58:22.136531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:58:22.136541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:22.136546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:58:22.136548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:58:22.136801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:58:22.136813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
hardId: 72075186233409548 CpuTimeUsec: 37 } } 2025-05-05T09:58:23.872777Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-05-05T09:58:23.872807Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.872812Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-05-05T09:58:23.873234Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.873272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.873279Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:23.873291Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:23.873324Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:23.873704Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-05-05T09:58:23.873733Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2025-05-05T09:58:23.873812Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:23.873839Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:23.873845Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-05-05T09:58:23.873871Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-05-05T09:58:23.873896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-05-05T09:58:23.880428Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:23.880444Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T09:58:23.880521Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:23.880527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-05-05T09:58:23.880645Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.880656Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:6831 Accept: */* Connection: Upgrade, HTTP2-Settings FAKE_COORDINATOR: Erasing txId 281474976710759 Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0CEA1558-9BE1-46B3-B4E0-D4AFECBB4D8C amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-05-05T09:58:23.880858Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T09:58:23.880873Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-05-05T09:58:23.880878Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-05-05T09:58:23.880884Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-05-05T09:58:23.880890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-05-05T09:58:23.880911Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-05-05T09:58:23.881750Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:6831 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 381AF709-7CCD-4C43-8BD0-5D7B9CBBB141 amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:6831 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 83C149F6-FE50-4BD6-9348-3024C5D50709 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-05-05T09:58:23.884877Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 512 RawX2: 17179871654 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:23.884896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2025-05-05T09:58:23.884920Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 512 RawX2: 17179871654 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:23.884935Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 512 RawX2: 17179871654 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-05-05T09:58:23.884947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:23.884951Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.884956Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-05-05T09:58:23.884962Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T09:58:23.885015Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:23.885510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.885554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:23.885563Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T09:58:23.885576Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:58:23.885580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:23.885586Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:58:23.885590Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:23.885598Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-05-05T09:58:23.885612Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710759 2025-05-05T09:58:23.885616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:23.885620Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T09:58:23.885623Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T09:58:23.885649Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-05-05T09:58:23.886149Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T09:58:23.886162Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T09:58:23.886550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:58:23.886561Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:538:2488] TestWaitNotification: OK eventTxId 102 >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore >> test.py::test[aggregate-list_with_fold_map--Results] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD] >> test.py::test[file-file_list_simple--ForceBlocks] |95.7%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/as_default/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/parse_error/test.csv] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> test.py::test[bigdate-tz_table_rw--Results] >> test.py::test[pg-tpcds-q26-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] >> test.py::test[join-full_trivial_udf_call--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial_udf_call--Results] >> test.py::test[ql_filter-integer_bounds--Results] [GOOD] >> test.py::test[ql_filter-integer_many_left--Results] >> test.py::test[optimizers-length_over_merge--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is 
[1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:54.918582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:54.918610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:54.918615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:54.918620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:54.918626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:54.918629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:54.918637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:54.918664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:54.918749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:54.918819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:54.932272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:54.932297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:54.932376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:54.935063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:54.935127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:54.935155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:54.939081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:54.939148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:54.939267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.939322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:54.939806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.940053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:54.940063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.940120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:54.940129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:54.940134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:54.940159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:54.941507Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:54.961026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:54.961102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.961163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:54.961228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:54.961237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.961995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.962021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:54.962087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.962096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:54.962100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:54.962105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 
2 -> 3 2025-05-05T09:57:54.962566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.962578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:54.962583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:54.962957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.962967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.962972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:54.962979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:54.963640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:54.964166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:54.964210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:54.964413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.964444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:54.964452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:54.964512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:54.964521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:54.964559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:54.964573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:54.965058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:54.965065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:54.965118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.965124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:54.965134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.965141Z node 1 :FLAT_TX_SCHEMESHARD I ... 7594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.294500Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.294503Z node 110 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:25.294506Z node 110 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 7 2025-05-05T09:58:25.294508Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T09:58:25.294518Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:25.294927Z node 110 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:25.294982Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:25.294988Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:25.294993Z node 110 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:25.295041Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:58:25.295062Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T09:58:25.295111Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:25.295126Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 472446404708 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:25.295130Z node 110 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T09:58:25.295148Z node 110 
:FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:25.295155Z node 110 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:25.295157Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:25.295161Z node 110 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:25.295163Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:25.295170Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:25.295176Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:58:25.295180Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:58:25.295184Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:25.295186Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:58:25.295189Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:58:25.295195Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T09:58:25.295198Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:58:25.295201Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T09:58:25.295203Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T09:58:25.295712Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.295771Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-05-05T09:58:25.295781Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-05T09:58:25.295830Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:25.295972Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.296083Z node 110 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:25.296090Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:25.296123Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T09:58:25.296146Z node 110 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:25.296151Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [110:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:58:25.296156Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [110:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:58:25.296298Z node 110 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.296308Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.296312Z node 110 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:25.296317Z node 110 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T09:58:25.296321Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T09:58:25.296429Z node 110 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.296450Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.296454Z node 110 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:25.296458Z node 110 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T09:58:25.296461Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:58:25.296472Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:25.296476Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [110:126:2151] 2025-05-05T09:58:25.296533Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:25.296538Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T09:58:25.296550Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:25.296913Z node 110 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.297128Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:25.297151Z node 110 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:25.297161Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:25.297169Z node 110 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:25.297173Z node 110 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:25.297178Z node 110 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T09:58:25.297229Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:25.297486Z node 110 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T09:58:25.297529Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T09:58:25.297536Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T09:58:25.297598Z node 110 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T09:58:25.297613Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T09:58:25.297617Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [110:745:2703] TestWaitNotification: OK eventTxId 1004 >> test.py::test[pg-tpcds-q90-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> test.py::test[in-in_enum_single1-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] >> test.py::test[tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q17-default.txt-Results] >> test.py::test[join-join_and_distinct_key-off-ForceBlocks] [GOOD] >> test.py::test[join-join_and_distinct_key-off-Results] [SKIPPED] >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] >> test.py::test[sampling-mapjoin_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] >> test.py::test[coalesce-coalesce_few_opt--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Results] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--ForceBlocks] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[join-star_join_mirror-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[join-yql-12022-off-ForceBlocks] >> test_format_setting.py::TestS3::test_date_null_with_not_null_type[v2-date_null/parse_error/test.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/as_default/multi_null.csv] |95.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> test.py::test[optimizers-length_over_merge--Results] [GOOD] >> 
test.py::test[optimizers-nonselected_direct_row--ForceBlocks] >> test.py::test[join-mergejoin_semi_to_inner--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] >> test.py::test[pg-tpcds-q98-default.txt-ForceBlocks] >> test.py::test[in-in_enum_single1-default.txt-Results] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt-ForceBlocks] >> test.py::test[file-file_list_simple--ForceBlocks] [GOOD] >> test.py::test[file-file_list_simple--Results] >> test.py::test[action-dep_world_quote_code-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_opt--Results] >> test.py::test[blocks-combine_all_minmax_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_minmax_nested--Results] >> test.py::test[join-full_trivial_udf_call--Results] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-ForceBlocks] >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q31-default.txt-Results] |95.8%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] >> test.py::test[bigdate-tz_table_rw--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> test.py::test[coalesce-coalesce_few_opt--Results] [GOOD] >> test.py::test[column_group-hint_dup_def_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-select_orderby-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> test.py::test[aggregate-group_by_session_star--ForceBlocks] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-json_each_row] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job--ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/as_default/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/parse_error/multi_null.csv] >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] >> test.py::test[tpch-q22-default.txt-ForceBlocks] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] >> test.py::test[aggregate-list_with_fold_map--Results] [GOOD] >> 
test.py::test[bigdate-table_int_cast-default.txt-Results] >> test.py::test[ql_filter-integer_many_left--Results] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--Results] >> test.py::test[file-file_list_simple--Results] [GOOD] >> test.py::test[file-where_key_in_get_file_content--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3720991) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:57:44] send response localhost:2218/?database=local ::1 - - [05/May/2025 09:57:44] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_ydb_backup.py::TestSingleBackupRestore::test_single_table_with_data_backup_restore [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-ForceBlocks] >> test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-empty_do-default.txt-ForceBlocks] >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Results] >> test.py::test[blocks-combine_all_minmax_nested--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum--ForceBlocks] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--Results] >> test_format_setting.py::TestS3::test_date_null_multi[v1-date_null/parse_error/multi_null.csv] [GOOD] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/as_default/multi_null.csv] >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_cast_to_string--ForceBlocks] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table >> test.py::test[pg-tpcds-q31-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] >> test.py::test[pg-tpcds-q98-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-Results] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit--Results] [SKIPPED] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:51.690376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:51.690406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:51.690412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:51.690417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:51.690423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:51.690426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:51.690436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:51.690451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:51.690547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:51.690624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:51.703319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:51.703347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:51.703445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:51.705527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:51.705572Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:51.705603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:51.707036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:51.707089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:51.707214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:51.707284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:51.707833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:51.708175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:51.708187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:51.708236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:51.708244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:51.708251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:51.708280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:51.710184Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:51.731734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:51.731829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.731901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:51.731992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:51.732004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.732930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:51.732960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:51.733040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.733051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:51.733056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:51.733061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:51.733500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.733510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:51.733516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:51.733893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.733903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.733908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:51.733916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:51.734566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:51.735130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:51.735189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:51.735431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:51.735463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:51.735472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:51.735539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:51.735548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:51.735593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:51.735608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:51.738721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:51.738744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:51.738824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:51.738833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:51.738850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.738860Z node 1 :FLAT_TX_SCHEMESHARD I ... 7594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.974078Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.974083Z node 121 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:28.974087Z node 121 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-05-05T09:58:28.974092Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T09:58:28.974110Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:28.974386Z node 121 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:28.974437Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:28.974442Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:28.974447Z node 121 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:28.974526Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:58:28.974541Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T09:58:28.974812Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:28.974831Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 519691044971 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:28.974839Z node 121 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T09:58:28.974864Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:28.974870Z node 121 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:28.974873Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:28.974876Z node 121 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:28.974878Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:28.974884Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:28.974891Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T09:58:28.974895Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:58:28.974902Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:28.974904Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:58:28.974908Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:58:28.974944Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T09:58:28.974949Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:58:28.974953Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T09:58:28.974956Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-05-05T09:58:28.975138Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.975165Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T09:58:28.975172Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T09:58:28.975247Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:28.975581Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.975953Z node 121 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:28.975966Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:28.975998Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T09:58:28.976025Z node 121 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:28.976030Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [121:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:58:28.976036Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [121:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:58:28.976207Z node 121 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.976223Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.976228Z node 121 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:28.976232Z node 121 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T09:58:28.976237Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:28.976353Z node 121 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.976364Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.976368Z node 121 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:28.976371Z node 121 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-05T09:58:28.976375Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T09:58:28.976387Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:28.976391Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to 
actorId: [121:125:2151] 2025-05-05T09:58:28.976477Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:28.976485Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T09:58:28.976496Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:28.977405Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.977532Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:28.977556Z node 121 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:28.977569Z node 121 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:28.977582Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:28.977591Z node 121 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:28.977596Z node 121 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:28.977601Z node 121 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T09:58:28.978106Z node 121 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T09:58:28.978178Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T09:58:28.978187Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T09:58:28.978266Z node 121 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T09:58:28.978286Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T09:58:28.978291Z node 121 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [121:973:2877] TestWaitNotification: OK eventTxId 1004 >> test.py::test[join-premap_common_multiparents--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] >> test.py::test[lineage-reduce-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_union_all-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_no_aggregate_split--Results] >> test.py::test[join-yql-12022-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-12022-off-Results] [SKIPPED] >> test.py::test[json-json_value/example--ForceBlocks] >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Results] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] >> test.py::test[optimizers-nonselected_direct_row--ForceBlocks] [GOOD] >> test.py::test[optimizers-nonselected_direct_row--Results] >> test.py::test[weak_field-weak_field_opt--Results] [GOOD] 
>> test.py::test[weak_field-weak_field_wrong_types_fail--Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] >> test.py::test[column_order-select_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_orderby-default.txt-Results] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_table.py::TestExecuteQueryWithParamsFromStdin::test_skip_rows_tsv[scan] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-Results] >> test.py::test[pg-tpcds-q98-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_many_noskiff--Results] [GOOD] >> test.py::test[ql_filter-integer_members_eval--Results] >> test.py::test[in-in_with_table_of_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/as_default/multi_null.csv] [GOOD] |95.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_impex.py::TestImpex::test_import_stdin_with_bom[json-additional_args4-column] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-Results] [SKIPPED] >> test.py::test[join-grace_join1--ForceBlocks] >> test.py::test[action-eval_folder_via_file_in_job--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job--Results] >> test.py::test[file-where_key_in_get_file_content--ForceBlocks] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Results] >> test.py::test[action-empty_do-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-empty_do-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--ForceBlocks] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/parse_error/multi_null.csv] >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q94-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] >> test.py::test[column_order-select_orderby-default.txt-Results] [GOOD] >> test.py::test[count-count_all_view_concat--ForceBlocks] >> test.py::test[weak_field-weak_field_wrong_types_fail--Results] [GOOD] >> test.py::test[window-current/aggregations_leadlag--Results] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] >> test.py::test[join-mergejoin_semi_to_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: 
[1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:46.836065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:46.836099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:46.836105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:46.836111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:46.836117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:46.836121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:46.836131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:46.836147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:46.836268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:46.836352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:46.850118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:46.850147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:46.850248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:46.852584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:46.852635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:46.852666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:46.853832Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:46.853885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:46.854028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.854092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:46.854609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.854972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:46.854984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.855031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:46.855038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:46.855044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:46.855072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:46.856988Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:46.876508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:46.876598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.876668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:46.876740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:46.876752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.877587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.877618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:46.877697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-05-05T09:57:46.877709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:46.877714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:46.877719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:46.880163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.880213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:46.880221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:46.880787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.880801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.880808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.880816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:46.881468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:46.882178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:46.882235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:46.882475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.882513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:46.882522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.882615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:46.882625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.882678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:46.882693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:46.883252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:46.883262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:46.883312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.883316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:46.883325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.883330Z node 1 :FLAT_TX_SCHEMESHARD I ... 5T09:58:30.742777Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:30.742849Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-05-05T09:58:30.742932Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:58:30.743717Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:30.743729Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T09:58:30.743746Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:30.744300Z node 157 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:30.744374Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:58:30.744395Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-05T09:58:30.744429Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:30.744433Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:30.744439Z node 157 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T09:58:30.744490Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.744526Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-05-05T09:58:30.744546Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 674309867628 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:30.744557Z node 157 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T09:58:30.744583Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:30.744591Z node 157 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:30.744595Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:30.744600Z node 157 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:30.744603Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:30.744611Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:30.744620Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:30.744626Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:58:30.744632Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:30.744636Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:58:30.744639Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:58:30.744650Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:58:30.744655Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:58:30.744659Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T09:58:30.744663Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:58:30.745389Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.745420Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-05-05T09:58:30.745430Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-05T09:58:30.745449Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:30.745890Z node 157 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:30.745903Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:30.745934Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:58:30.745957Z node 157 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:30.745962Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [157:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:58:30.745970Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [157:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:58:30.746129Z node 157 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.746141Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.746145Z node 157 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:30.746150Z node 157 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T09:58:30.746154Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:30.746256Z node 157 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.746265Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.746269Z node 157 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:30.746272Z node 157 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:58:30.746276Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:58:30.746286Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:30.746291Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [157:123:2149] 2025-05-05T09:58:30.746349Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:30.746354Z node 
157 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:58:30.746364Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:30.747078Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.747207Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:30.747224Z node 157 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:30.747236Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:30.747246Z node 157 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:30.747251Z node 157 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:30.747257Z node 157 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T09:58:30.747318Z node 157 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:30.747661Z node 157 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T09:58:30.747723Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T09:58:30.747731Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T09:58:30.747800Z node 157 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T09:58:30.747817Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T09:58:30.747821Z node 157 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [157:721:2679] TestWaitNotification: OK eventTxId 1003 >> test.py::test[action-empty_do-default.txt-Results] [GOOD] >> test.py::test[action-eval_typeof_output_table--ForceBlocks] >> test.py::test[pg-tpch-q11-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_sum--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_sum--Results] >> test.py::test[pg-tpcds-q34-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q35-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-csv_with_names] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job--Results] [GOOD] >> test.py::test[action-subquery_merge2-default.txt-ForceBlocks] >> test.py::test[join-left_cast_to_string--ForceBlocks] [GOOD] >> test.py::test[join-left_cast_to_string--Results] >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] >> test.py::test[binding-table_from_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min--Results] >> test.py::test[tpch-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q22-default.txt-Results] >> 
test.py::test[file-where_key_in_get_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--ForceBlocks] >> test.py::test[optimizers-yql-5833-table_content--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] >> test.py::test[join-premap_common_multiparents--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-ForceBlocks] >> test.py::test[aggregate-group_by_session_star--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_star--Results] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_date_null_multi[v2-date_null/parse_error/multi_null.csv] [GOOD] >> test.py::test[action-eval_typeof_output_table--ForceBlocks] [GOOD] >> test.py::test[action-eval_typeof_output_table--Results] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/as_default/multi_null.csv] >> test.py::test[agg_apply-avg_const_interval--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-default-Results] >> test.py::test[blocks-combine_hashed_sum--Results] [GOOD] >> test.py::test[blocks-date_greater--ForceBlocks] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_members_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_optional--Results] >> test.py::test[pg-tpcds-q99-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Results] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] >> test.py::test[optimizers-yql-5833-table_content--Results] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--ForceBlocks] >> test.py::test[join-left_cast_to_string--Results] [GOOD] >> test.py::test[join-left_semi_with_other--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_yq_streaming.py::TestYqStreaming::test_early_finish_case5[v1] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3728504) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> test.py::test[in-huge_in-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] >> test.py::test[join-grace_join1--ForceBlocks] [GOOD] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off-ForceBlocks] >> test.py::test[count-count_all_view_concat--ForceBlocks] [GOOD] >> test.py::test[count-count_all_view_concat--Results] >> test.py::test[pg-tpch-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-nested_semi_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_star--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-ForceBlocks] >> test.py::test[action-subquery_merge2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] >> test.py::test[join-nested_semi_join--Results] >> test.py::test[action-subquery_merge2-default.txt-Results] >> test.py::test[flatten_by-flatten_by_typed_table--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_typed_table--Results] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[schema-select_simple-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q35-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q44-default.txt-Results] >> test.py::test[pg-tpcds-q89-default.txt-Results] >> test.py::test[tpch-q22-default.txt-Results] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/as_default/multi_null.csv] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--Results] [GOOD] >> test.py::test[in-yql-10038-default.txt-Results] >> test.py::test[tpch-q3-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/parse_error/multi_null.csv] >> test.py::test[count-count_all_view_concat--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-ForceBlocks] >> test.py::test[json-json_value/example--ForceBlocks] [GOOD] >> test.py::test[json-json_value/example--Results] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-json_list-parquet] [GOOD] >> test.py::test[optimizers-test_no_aggregate_split--Results] [GOOD] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] [SKIPPED] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] >> test.py::test[distinct-distinct_star-default.txt-Results] >> test.py::test[action-subquery_merge2-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_semi_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_left--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--Results] [SKIPPED] >> test.py::test[join-order_of_qualified--Results] >> test.py::test[flatten_by-flatten_by_typed_table--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_all--ForceBlocks] [SKIPPED] >> 
test.py::test[bigdate-table_int_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[bigdate-tz_table_yt_key_filter--Results] [SKIPPED] >> test.py::test[blocks-add_int64--Results] >> test.py::test[hor_join-merge_multiouts_all--Results] [SKIPPED] >> test.py::test[hor_join-skip_sampling--ForceBlocks] >> test.py::test[ql_filter-integer_optional--Results] [GOOD] >> test.py::test[ql_filter-integer_select_other--Results] >> test.py::test[join-premap_common_multiparents_no_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_with_remap--ForceBlocks] >> test.py::test[blocks-combine_all_min--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--Results] >> test.py::test[agg_apply-avg_const_interval--ForceBlocks] [GOOD] >> test.py::test[agg_apply-avg_const_interval--Results] >> test.py::test[pg-tpch-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-ForceBlocks] >> test.py::test[order_by-literal_with_assume_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--Results] >> test.py::test[pg-tpch-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names--Results] >> test_format_setting.py::TestS3::test_string_not_null_multi[v1-date_null/parse_error/multi_null.csv] [GOOD] >> test.py::test[join-nested_semi_join--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-ForceBlocks] >> test.py::test[join-left_semi_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_semi_with_other--Results] >> test.py::test[pg-tpcds-q44-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q45-default.txt-Results] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/as_default/multi_null.csv] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> test.py::test[json-json_value/example--Results] [GOOD] >> test.py::test[key_filter-between_with_key_filter--ForceBlocks] >> test.py::test[join-join_key_cmp_udf-off-ForceBlocks] [GOOD] >> test.py::test[join-join_key_cmp_udf-off-Results] [SKIPPED] >> test.py::test[join-left_all-off-ForceBlocks] >> test.py::test[schema-select_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[view-view_with_library--ForceBlocks] >> test.py::test[window-current/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-current/ansi_current--Results] >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_with_python_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--Results] [SKIPPED] >> test.py::test[sampling-read--ForceBlocks] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> 
test.py::test[order_by-literal_with_assume_desc--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] >> test.py::test[agg_apply-avg_const_interval--Results] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-ForceBlocks] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> test.py::test[order_by-order_by_expr--ForceBlocks] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] >> test.py::test[join-cbo_4tables--Results] [SKIPPED] >> test.py::test[join-flatten_columns2--Results] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[blocks-add_uint64--Results] >> test.py::test[ql_filter-integer_select_other--Results] [GOOD] >> test.py::test[result_types-singular-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:58:15.429785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:58:15.429812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.429819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:58:15.429823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:58:15.429835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:58:15.429839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:58:15.429850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.429863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:58:15.429990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:58:15.430078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:58:15.441634Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:58:15.441657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:58:15.445189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:58:15.445487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:58:15.445524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:58:15.446872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:58:15.446948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:58:15.447045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.447360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:58:15.448596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.448903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.448916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.448934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:58:15.448942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.448948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:58:15.448984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.450401Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:58:15.471753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:15.471828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.471890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:58:15.471943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:58:15.471954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.472612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.472638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:58:15.472676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.472686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:58:15.472690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:58:15.472695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:58:15.473113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.473129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:15.473133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:58:15.473489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.473497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.473502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.473509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.474131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:15.474592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:58:15.474628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:58:15.474806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.474832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:15.474850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.474934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:58:15.474945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.474975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:58:15.474987Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:58:15.475437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.475446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.475481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.475487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:58:15.475545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.475553Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:58:15.475564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.475573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.475578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.475581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.475585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:58:15.475590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.475594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:58:15.475598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:58:15.475609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:15.475614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:58:15.475618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:58:15.475936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:58:15.475956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
is olap=0, RowCount 2, DataSize 70 2025-05-05T09:58:30.841730Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-05-05T09:58:30.841749Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-05T09:58:30.841760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-05T09:58:30.841770Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-05-05T09:58:30.852020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-05T09:58:34.174340Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0021 2025-05-05T09:58:34.195209Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2025-05-05T09:58:34.225780Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-05T09:58:34.225875Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-05T09:58:34.225912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-05T09:58:34.225956Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-05-05T09:58:34.225973Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-05T09:58:34.225984Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-05T09:58:34.225993Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-05-05T09:58:34.236234Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-05T09:58:37.587040Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-05-05T09:58:37.587100Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-05-05T09:58:37.587121Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-05-05T09:58:37.628582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0012 2025-05-05T09:58:37.659923Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0011 2025-05-05T09:58:37.700525Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-05-05T09:58:37.700614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-05-05T09:58:37.700656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 2, DataSize 70 2025-05-05T09:58:37.700696Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409546 2025-05-05T09:58:37.700709Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-05-05T09:58:37.700719Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0, RowCount 0, DataSize 0, with borrowed parts 2025-05-05T09:58:37.700729Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Do not want to split tablet 72075186233409547 2025-05-05T09:58:37.710986Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-05-05T09:58:38.653360Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [4:572:2530], attempt# 1 2025-05-05T09:58:38.656380Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:571:2529] 2025-05-05T09:58:38.658120Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:572:2530], sender# [4:571:2529] 2025-05-05T09:58:38.658145Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:571:2529] 2025-05-05T09:58:38.658165Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:572:2530], sender# [4:571:2529], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-05-05T09:58:38.658225Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:572:2530], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1912 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B661E63C-8B7E-47D6-8307-BC3D24E1CE8A amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-138-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-05-05T09:58:38.659925Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:572:2530], result# 2025-05-05T09:58:38.660019Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:571:2529], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 
1 Error: } 2025-05-05T09:58:38.663894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:58:38.663923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-05-05T09:58:38.663956Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:58:38.663973Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-05-05T09:58:38.663991Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:38.663996Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:38.664002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-05-05T09:58:38.664011Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-05-05T09:58:38.664094Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:38.664892Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:38.665006Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-05-05T09:58:38.665017Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-05-05T09:58:38.665032Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:58:38.665037Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:38.665043Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-05-05T09:58:38.665046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:38.665051Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-05-05T09:58:38.665082Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:124:2150] message: TxId: 281474976710759 2025-05-05T09:58:38.665090Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-05-05T09:58:38.665096Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-05-05T09:58:38.665101Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-05-05T09:58:38.665131Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T09:58:38.665685Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-05-05T09:58:38.665701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-05-05T09:58:38.666156Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:58:38.666175Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:590:2545] TestWaitNotification: OK eventTxId 102 >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_list] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] [GOOD] >> test.py::test[schema-skip_complex_type--ForceBlocks] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/as_default/multi_null.csv] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] >> test.py::test[in-huge_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-huge_in-default.txt-Results] >> test.py::test[tpch-q3-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q3-default.txt-Results] >> test.py::test[distinct-distinct_count_and_avg-default.txt-ForceBlocks] [GOOD] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/parse_error/multi_null.csv] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter--ForceBlocks] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:56.432849Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:56.432879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:56.432885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:56.432889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:56.432895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:56.432898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:56.432907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:56.432921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:56.433017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:56.433098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:56.446843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:56.446869Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:56.447098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:56.448910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:56.448945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:56.448966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:56.449873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:56.449927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:56.450032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:56.450077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:56.450472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:56.450719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:56.450730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:56.450772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:56.450782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:56.450787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:56.450810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:56.452089Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:56.472044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:56.472119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.472171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:56.472235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:56.472247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.472966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:56.472998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:56.473059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.473069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:56.473075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:56.473080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:56.473551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.473564Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:56.473569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:56.474036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.474051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.474056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:56.474062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:56.474716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:56.475198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:56.475231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:56.475414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:56.475438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:56.475445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:56.475498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:56.475505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:56.475532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:56.475543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:56.476051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:56.476061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:56.476100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-05-05T09:57:56.476105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:56.476115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:56.476121Z node 1 :FLAT_TX_SCHEMESHARD I ... 7594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.306668Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.306673Z node 158 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:38.306678Z node 158 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 7 2025-05-05T09:58:38.306683Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T09:58:38.306699Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:38.306902Z node 158 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:38.307010Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:38.307016Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:38.307022Z node 158 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:38.307042Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:58:38.307068Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T09:58:38.307391Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:38.307415Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 678604834916 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:38.307424Z node 158 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T09:58:38.307454Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:38.307466Z node 158 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976710761:0 progress is 1/1 2025-05-05T09:58:38.307470Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:38.307476Z node 158 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:38.307480Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:38.307490Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:38.307505Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:58:38.307511Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:58:38.307519Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:38.307523Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:58:38.307528Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:58:38.307538Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T09:58:38.307545Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:58:38.307550Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T09:58:38.307554Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T09:58:38.307726Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.307747Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-05-05T09:58:38.307757Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-05-05T09:58:38.308346Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:38.308380Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.308760Z node 158 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:38.308771Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:38.308816Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T09:58:38.308843Z node 158 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:38.308848Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [158:204:2206], at schemeshard: 
72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:58:38.308854Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [158:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:58:38.309032Z node 158 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.309044Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.309049Z node 158 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:38.309054Z node 158 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T09:58:38.309063Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T09:58:38.309215Z node 158 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.309225Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.309230Z node 158 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:38.309234Z node 158 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T09:58:38.309239Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:58:38.309250Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:38.309255Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [158:129:2154] 2025-05-05T09:58:38.309279Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:38.309285Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T09:58:38.309296Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:38.309768Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.310111Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:38.310137Z node 158 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:38.310149Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:38.310159Z node 158 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:38.310165Z node 158 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:38.310170Z node 158 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T09:58:38.310231Z node 158 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:38.310674Z node 158 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T09:58:38.310762Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T09:58:38.310770Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T09:58:38.310853Z node 158 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T09:58:38.310870Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T09:58:38.310879Z node 158 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [158:742:2699] TestWaitNotification: OK eventTxId 1004 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:108:2140] 2025-05-05T09:58:15.473022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:58:15.473042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.473048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:58:15.473052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:58:15.473062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:58:15.473066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:58:15.473076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:58:15.473088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:58:15.473173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:58:15.473231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:58:15.486634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-05-05T09:58:15.486674Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:58:15.490519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:58:15.490855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:58:15.490900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:58:15.492222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:58:15.492272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:58:15.492362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.492626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:58:15.493516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.493788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:58:15.493796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.493802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:58:15.493830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.495120Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:237:2058] recipient: [1:15:2062] 2025-05-05T09:58:15.508989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:58:15.509049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.509099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:58:15.509153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:58:15.509160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.509783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.509802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:58:15.509832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.509838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:58:15.509842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:58:15.509845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:58:15.510168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.510175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:58:15.510178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:58:15.510458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.510466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.510470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.510474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.510970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:15.511335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:58:15.511361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:58:15.511486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:15.511507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:15.511520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.511559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-05-05T09:58:15.511565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:58:15.511590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:58:15.511598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:58:15.512035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:15.512043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:15.512083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:15.512089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:58:15.512145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:58:15.512151Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-05-05T09:58:15.512159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.512162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.512165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-05-05T09:58:15.512167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.512170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-05-05T09:58:15.512173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-05-05T09:58:15.512176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-05-05T09:58:15.512179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-05-05T09:58:15.512187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:15.512190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-05-05T09:58:15.512193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-05-05T09:58:15.512438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-05-05T09:58:15.512452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: ... 
57594046678944 2025-05-05T09:58:39.135766Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-05-05T09:58:39.135777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-05-05T09:58:39.135809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:58:39.136013Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.136028Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.136033Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T09:58:39.136039Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-05-05T09:58:39.136048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:39.136105Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.136115Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.136119Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T09:58:39.136123Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-05-05T09:58:39.136126Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T09:58:39.136135Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-05-05T09:58:39.136780Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:39.137051Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-05-05T09:58:39.137061Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-05-05T09:58:39.137068Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-05-05T09:58:39.137176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-05-05T09:58:39.137210Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-05-05T09:58:39.137292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.137372Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:39.137401Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:39.137409Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-05-05T09:58:39.137446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-05-05T09:58:39.137459Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-05-05T09:58:39.137463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-05-05T09:58:39.137469Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-05-05T09:58:39.137473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-05-05T09:58:39.137487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-05-05T09:58:39.137501Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T09:58:39.137508Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-05-05T09:58:39.137516Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-05-05T09:58:39.137521Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-05-05T09:58:39.137525Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-05-05T09:58:39.137539Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-05-05T09:58:39.137546Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-05-05T09:58:39.137550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-05-05T09:58:39.137554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-05-05T09:58:39.138093Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.138580Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:39.138596Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:39.138681Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-05-05T09:58:39.138715Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:39.138723Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-05-05T09:58:39.138730Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-05-05T09:58:39.138987Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.139005Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.139011Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T09:58:39.139017Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-05-05T09:58:39.139023Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:39.139183Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.139201Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.139206Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2025-05-05T09:58:39.139210Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-05-05T09:58:39.139215Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-05-05T09:58:39.139228Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2025-05-05T09:58:39.139233Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:289:2276] 2025-05-05T09:58:39.140143Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.140231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-05-05T09:58:39.140253Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2025-05-05T09:58:39.140288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2025-05-05T09:58:39.140298Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:39.140304Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2025-05-05T09:58:39.140310Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2025-05-05T09:58:39.140718Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:39.140746Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-05-05T09:58:39.140753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:704:2644] TestWaitNotification: OK eventTxId 102 >> test.py::test[pg-tpcds-q45-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q48-default.txt-Results] >> test.py::test[hor_join-skip_sampling--ForceBlocks] [GOOD] >> test.py::test[hor_join-skip_sampling--Results] >> test.py::test[blocks-combine_all_sum_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_pg--Results] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] >> test.py::test[insert-from_two_sorted_by_calc-default.txt-Results] [GOOD] >> test.py::test[insert-insert_from_other--ForceBlocks] >> test_format_setting.py::TestS3::test_string_not_null_multi[v2-date_null/parse_error/multi_null.csv] [GOOD] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] >> test.py::test[aggr_factory-every-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-order_of_qualified-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_filter-off-Results] [SKIPPED] >> test.py::test[join-premap_map_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi--Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v1] |95.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[in-yql-10038-default.txt-Results] [GOOD] >> test.py::test[insert-append_proto_fail--Results] >> test.py::test[join-premap_merge_with_remap--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_with_remap--Results] >> test.py::test[join-premap_common_inner_both_sides-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Results] [SKIPPED] >> test.py::test[join-premap_map_cross--ForceBlocks] |95.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--ForceBlocks] >> test.py::test[pg-tpch-q11-default.txt-Results] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--Results] [GOOD] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-count_bans--Results] >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Results] >> test.py::test[pg-tpch-q01-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Results] [SKIPPED] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] >> test.py::test[sampling-read--ForceBlocks] [GOOD] >> test.py::test[sampling-read--Results] >> test.py::test[in-huge_in-default.txt-Results] [GOOD] >> test.py::test[join-left_all-off-ForceBlocks] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner--ForceBlocks] >> test.py::test[blocks-date_greater--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater--Results] >> test.py::test[tpch-q3-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-ForceBlocks] >> test.py::test[key_filter-between_with_key_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Results] >> test.py::test[view-view_with_library--ForceBlocks] [GOOD] >> test.py::test[view-view_with_library--Results] >> test.py::test[result_types-singular-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-Results] >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_expr--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr--Results] >> test.py::test[pg-tpch-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Results] >> test.py::test[blocks-add_uint64--Results] [GOOD] >> test.py::test[blocks-combine_all_max--Results] >> test.py::test[insert-append_proto_fail--Results] [GOOD] >> test.py::test[insert-drop_sortness--Results] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-json_each_row] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q48-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-Results] >> test.py::test[schema-skip_complex_type--ForceBlocks] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce--ForceBlocks] [SKIPPED] >> test.py::test[sampling-reduce--Results] [SKIPPED] >> test.py::test[sampling-subquery_multiple_sample-default.txt-ForceBlocks] >> test.py::test[schema-skip_complex_type--Results] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[view-view_with_library--Results] [GOOD] >> 
test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] >> test.py::test[blocks-combine_all_count_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count_filter--Results] >> test.py::test[join-premap_merge_with_remap--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--ForceBlocks] >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test.py::test[key_filter-contains_optional--ForceBlocks] >> test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--ForceBlocks] >> test.py::test[join-flatten_columns2--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat--Results] >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] >> test.py::test[insert-insert_from_other--ForceBlocks] [GOOD] >> test.py::test[insert-insert_from_other--Results] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] >> test.py::test[aggr_factory-mode-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] >> test.py::test[limit-limit-dynamic-ForceBlocks] >> test_ydb_backup.py::TestBackupRestoreInRoot::test_table_backup_restore_in_root [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> test.py::test[window-current/ansi_current--Results] [GOOD] >> test.py::test[window-distinct_over_window_full_frames--Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] >> test.py::test[schema-skip_complex_type--Results] [GOOD] >> test.py::test[select-append_to_value--ForceBlocks] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[schema-insert_sorted-read_schema-Results] >> test.py::test[produce-process_and_filter-default.txt-Results] >> test.py::test[sampling-system_sampling-io_block_size-ForceBlocks] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test.py::test[sampling-table_content--ForceBlocks] >> test.py::test[join-premap_map_semi--Results] [GOOD] >> test.py::test[join-premap_merge_inner-off-Results] >> test.py::test[blocks-combine_all_count_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--ForceBlocks] >> test.py::test[join-premap_merge_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_with_remap--Results] >> test.py::test[join-lookupjoin_inner--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[insert-drop_sortness--Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--Results] >> test.py::test[insert-insert_relabeled-default.txt-Results] >> test.py::test[join-premap_common_right_tablecontent--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] >> test.py::test[dq-blacklisted_pragmas1--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel--Results] [SKIPPED] >> test.py::test[dq-truncate_local-default.txt-Results] [SKIPPED] >> test.py::test[expr-inline_call--Results] >> test.py::test[join-premap_map_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_cross--Results] >> 
test.py::test[union_all-mix_map_and_project-trivial_map-ForceBlocks] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> test.py::test[insert-insert_from_other--Results] [GOOD] >> test.py::test[insert-override-with_read_udf-ForceBlocks] >> test.py::test[aggregate-group_by_tablerow_column--Results] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat-Results] [SKIPPED] >> test.py::test[bigdate-tz_table_fill--Results] >> test.py::test[sampling-bind_topsort-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read-dynamic-Results] >> test.py::test[pg-tpcds-q60-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] >> test.py::test[blocks-date_greater--Results] [GOOD] >> test.py::test[blocks-date_less_scalar--ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] |95.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_scripting.py::TestExecuteScriptFromStdinWithWideOutput::test_wide_table [GOOD] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o--ForceBlocks] >> test.py::test[pg-tpch-q16-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_fail--ForceBlocks] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Results] [SKIPPED] >> test.py::test[produce-native_desc_reduce_with_presort--ForceBlocks] [SKIPPED] >> test.py::test[produce-native_desc_reduce_with_presort--Results] [SKIPPED] >> test.py::test[produce-process_pure_with_sort-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_multiple_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] >> test.py::test[blocks-combine_all_max--Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-ForceBlocks] >> test.py::test[join-premap_map_cross--Results] [GOOD] >> test.py::test[join-premap_merge_inner--ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] >> test.py::test[join-premap_common_right_tablecontent--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--ForceBlocks] >> test.py::test[key_filter-contains_optional--ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_optional--Results] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--ForceBlocks] >> test.py::test[select-append_to_value--ForceBlocks] [GOOD] >> test.py::test[select-append_to_value--Results] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] >> test.py::test[schema-insert_sorted-read_schema-Results] [GOOD] >> test.py::test[schema-select_field-read_schema-Results] >> test.py::test[produce-process_and_filter-default.txt-Results] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-Results] >> test.py::test[expr-inline_call--Results] [GOOD] >> test.py::test[file-where_key_in_file_content_typed--Results] >> test.py::test[insert-anonymous_tables-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt-Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] >> test.py::test[limit-limit-dynamic-ForceBlocks] [GOOD] >> test.py::test[limit-limit-dynamic-Results] >> test.py::test[distinct-distinct_count_no_gouping-default.txt-Results] [GOOD] >> test.py::test[dq-pool_trees_whitelist--ForceBlocks] >> test.py::test[insert-insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-ForceBlocks] >> test.py::test[optimizers-yql-6008_limit_after_map--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] >> test.py::test[aggregate-compare_by_nulls-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] >> test.py::test[join-inner_trivial_from_concat--Results] [GOOD] >> test.py::test[join-join_right_cbo--Results] >> test.py::test[blocks-combine_hashed_minmax_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] >> test.py::test[sampling-table_content--ForceBlocks] [GOOD] >> test.py::test[sampling-table_content--Results] >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] >> test.py::test[key_filter-contains_optional--Results] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-ForceBlocks] >> test.py::test[select-append_to_value--Results] [GOOD] >> test.py::test[select-boolean_where--ForceBlocks] >> test.py::test[limit-limit-dynamic-Results] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--ForceBlocks] >> test.py::test[produce-fuse_reduces_with_presort--ForceBlocks] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--Results] >> test.py::test[insert-override-with_read_udf-ForceBlocks] [GOOD] >> test.py::test[insert-override-with_read_udf-Results] >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] >> test.py::test[sampling-direct_read-dynamic-Results] [GOOD] >> test.py::test[sampling-read-dynamic-Results] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-ForceBlocks] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] >> test.py::test[insert-anonymous_tables-default.txt-Results] [GOOD] >> test.py::test[insert-append_view_fail--ForceBlocks] >> test.py::test[join-count_bans--Results] [GOOD] >> test.py::test[join-equi_join_three_simple-off-Results] [SKIPPED] >> test.py::test[join-full_trivial_udf_call--Results] 
>> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--ForceBlocks] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--ForceBlocks] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable >> test.py::test[join-premap_merge_with_remap--Results] [GOOD] >> test.py::test[join-pullup_cross-off-Results] [SKIPPED] >> test.py::test[join-pullup_exclusion--Results] >> test.py::test[join-lookupjoin_semi_1o2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o--Results] >> test.py::test[pg-tpcds-q70-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] >> test.py::test[insert-override-with_read_udf-Results] [GOOD] >> test.py::test[insert-udf_empty--ForceBlocks] >> test.py::test[sampling-table_content--Results] [GOOD] >> test.py::test[join-premap_merge_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_inner--Results] >> test.py::test[schema-select_all-schema-ForceBlocks] >> test.py::test[blocks-combine_hashed_pg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum--Results] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] >> test_statistics.py::TestS3::test_convert[v2-client0-json_each_row-parquet] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] [GOOD] >> test.py::test[blocks-date_sub_interval--ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr--ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[schema-select_field-read_schema-Results] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] >> test.py::test[insert-append_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-append_view_fail--Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-ForceBlocks] >> test.py::test[produce-process_pure_with_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] >> test.py::test[file-where_key_in_file_content_typed--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] >> test.py::test[produce-process_row_and_columns-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] [GOOD] >> test.py::test[order_by-changed_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-literal_complex--Results] [SKIPPED] >> test.py::test[order_by-sort--Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] [GOOD] >> 
test.py::test[weak_field-yql-7888_mapfieldsubset--ForceBlocks] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-several1-default.txt-Results] >> test.py::test[aggregate-GroupByTwoFields--ForceBlocks] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--Results] >> test.py::test[binding-table_from_binding_inferscheme-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] >> test.py::test[schema-select_all-row_spec_hide_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] >> test.py::test[join-premap_merge_extrasort2--Results] >> test.py::test[dq-pool_trees_whitelist--ForceBlocks] [GOOD] >> test.py::test[dq-pool_trees_whitelist--Results] [SKIPPED] >> test.py::test[epochs-read_modified--ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache--ForceBlocks] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--ForceBlocks] >> test.py::test[join-premap_merge_inner--Results] [GOOD] >> test.py::test[join-premap_no_premap--ForceBlocks] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python-default.txt-ForceBlocks] >> test.py::test[limit-limit_over_sort_desc_in_subquery--ForceBlocks] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] >> test.py::test[schema-select_all-row_spec_hide_sort-Results] [GOOD] >> test.py::test[sampling-read-dynamic-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range_empty_fail--ForceBlocks] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] >> test.py::test[pg-tpcds-q78-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-off-Results] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--Results] [SKIPPED] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test.py::test[json-json_query/example--ForceBlocks] >> test.py::test[select-boolean_where--ForceBlocks] [GOOD] >> test.py::test[select-boolean_where--Results] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] [GOOD] >> test.py::test[join-simple_columns_partial--ForceBlocks] >> test.py::test[aggregate-GroupByTwoFields--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--ForceBlocks] >> test.py::test[join-join_right_cbo--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access--Results] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] >> test.py::test[schema-select_all-schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-schema-Results] >> test.py::test[insert-udf_empty--ForceBlocks] [GOOD] >> 
test.py::test[insert-udf_empty--Results] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] >> test.py::test[schema-select_simple-default.txt-Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] >> test.py::test[join-full_trivial_udf_call--Results] [GOOD] >> test.py::test[join-grace_join2--Results] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed [GOOD] >> test.py::test[order_by-sort--Results] [GOOD] >> test.py::test[pg-in_mixed--Results] >> test.py::test[insert_monotonic-several1-default.txt-Results] [GOOD] >> test.py::test[join-bush_dis_in-off-Results] [SKIPPED] >> test.py::test[join-count_bans-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval--Results] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[join-grace_join2--Results] [SKIPPED] >> test.py::test[join-inner_all_right-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial--Results] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] [GOOD] >> test.py::test[lineage-flatten_by--ForceBlocks] >> test.py::test[aggregate-group_by_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_list] [GOOD] >> test.py::test[insert-drop_sortness-desc-ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval--ForceBlocks] >> test.py::test[select-boolean_where--Results] [GOOD] >> test.py::test[select-create_tuples-default.txt-ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] >> test.py::test[schema-select_all_inferschema_range_empty_fail--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_range_empty_fail--Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-ForceBlocks] >> test.py::test[insert-drop_sortness-desc-Results] >> test.py::test[schema-select_all-schema-Results] [GOOD] >> test.py::test[schema-select_field-row_spec-ForceBlocks] >> test.py::test[blocks-combine_hashed_set--Results] [GOOD] >> test.py::test[blocks-combine_hashed_some--Results] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-ForceBlocks] >> test.py::test[blocks-date_sub_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] >> test.py::test[insert-udf_empty--Results] [GOOD] >> test.py::test[insert_monotonic-break_unique_fail--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-break_unique_fail--Results] [SKIPPED] >> test.py::test[join-aggr_diff_order-default.txt-ForceBlocks] >> test.py::test[produce-process_streaming_count-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] >> test.py::test[join-premap_merge_extrasort2--Results] [GOOD] >> test.py::test[join-star_join_multi-off-ForceBlocks] >> test.py::test[blocks-combine_hashed_sum--Results] [GOOD] >> test.py::test[blocks-complex_scalars--Results] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_multi_key--ForceBlocks] >> test.py::test[join-pullup_exclusion--Results] [GOOD] >> 
test.py::test[join-three_equalities--Results] >> test.py::test[produce-process_with_python-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_python-default.txt-Results] >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> test.py::test[join-equi_join_by_expr-off-ForceBlocks] >> test.py::test[pg-tpcds-q85-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_count--Results] >> test.py::test[sampling-subquery_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-table_content--Results] >> test.py::test[aggregate-group_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--ForceBlocks] >> test.py::test[join-lookupjoin_with_cache--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_with_cache--Results] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] [GOOD] >> test.py::test[order_by-literal_complex--ForceBlocks] >> test.py::test[weak_field-yql-7888_mapfieldsubset--ForceBlocks] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:55.341376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:55.341401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:55.341406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:55.341411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:55.341416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:55.341420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:55.341429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-05-05T09:57:55.341441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:55.341531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:55.341600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:55.354586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:55.354608Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:55.354720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:55.356600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:55.356637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:55.356666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:55.357968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:55.358035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:55.358157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.358201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:55.358761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.359098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:55.359113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.359173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:55.359181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:55.359188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:55.359214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: 
[1:213:2214] 2025-05-05T09:57:55.360883Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:55.381284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:55.381370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.381437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:55.381510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:55.381527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.382264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.382298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:55.382368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.382379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:55.382385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:55.382391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:55.382860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.382873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:55.382879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:55.383368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.383384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.383389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:55.383397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:55.384067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:55.384620Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:55.384668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:55.384878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.384910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:55.384919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:55.384980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:55.384990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:55.385025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:55.385038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:55.385508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:55.385516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:55.385568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.385574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:55.385584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.385591Z node 1 :FLAT_TX_SCHEMESHARD I ... 
LAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T09:58:52.879430Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:52.879601Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.879612Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.879616Z node 169 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:52.879621Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-05-05T09:58:52.879627Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T09:58:52.879638Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:52.880134Z node 169 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:58:52.880159Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:58:52.880180Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-05-05T09:58:52.880387Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:58:52.880394Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:58:52.880400Z node 169 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T09:58:52.880468Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:58:52.880486Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 725849475180 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:58:52.880494Z node 169 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000009, at schemeshard: 72057594046678944 2025-05-05T09:58:52.880519Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:58:52.880528Z 
node 169 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:52.880532Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:52.880537Z node 169 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:58:52.880541Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:52.880549Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:52.880559Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T09:58:52.880564Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:58:52.880570Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:58:52.880574Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:58:52.880578Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:58:52.880587Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T09:58:52.880593Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:58:52.880600Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T09:58:52.880604Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-05-05T09:58:52.880792Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.880852Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.881089Z node 169 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:58:52.881097Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:58:52.881125Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T09:58:52.881148Z node 169 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:58:52.881153Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [169:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:58:52.881158Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [169:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:58:52.881303Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.881314Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.881319Z node 169 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:52.881324Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T09:58:52.881329Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:58:52.881422Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.881432Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.881436Z node 169 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:58:52.881441Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-05T09:58:52.881445Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T09:58:52.881456Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:58:52.881464Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [169:123:2149] 2025-05-05T09:58:52.881487Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:58:52.881493Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T09:58:52.881502Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:58:52.881998Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.882283Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:58:52.882306Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:58:52.882317Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:58:52.882328Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:58:52.882336Z node 169 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T09:58:52.882341Z node 169 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:58:52.882346Z node 169 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T09:58:52.882865Z node 169 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T09:58:52.882946Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T09:58:52.882954Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T09:58:52.883020Z node 169 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T09:58:52.883036Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T09:58:52.883041Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [169:975:2878] TestWaitNotification: OK eventTxId 1004 >> test.py::test[produce-process_with_python-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-ForceBlocks] >> test.py::test[produce-fuse_reduces_with_presort--Results] [GOOD] >> test.py::test[produce-process_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted-Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream--Results] [SKIPPED] >> test.py::test[ql_filter-integer_eval--ForceBlocks] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] >> test.py::test[pg-in_mixed--Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] >> test.py::test[flatten_by-flatten_dict--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_join--Results] [SKIPPED] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] >> test.py::test[join-premap_no_premap--ForceBlocks] [GOOD] >> test.py::test[join-premap_no_premap--Results] >> test.py::test[blocks-date_sub_interval--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] >> test.py::test[window-distinct_over_window_full_frames--Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[window-full/leadlag_compact--Results] >> test.py::test[schema-skip_complex_type--Results] >> test.py::test[join-simple_columns_partial--ForceBlocks] [GOOD] >> test.py::test[join-simple_columns_partial--Results] >> test.py::test[epochs-read_modified--ForceBlocks] [GOOD] >> test.py::test[epochs-read_modified--Results] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only >> 
test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-json_each_row] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] [GOOD] >> test.py::test[window-full/syscolumns--ForceBlocks] >> test.py::test[select-create_tuples-default.txt-ForceBlocks] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] >> test.py::test[select-create_tuples-default.txt-Results] >> test.py::test[join-lookupjoin_with_cache--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence--ForceBlocks] >> test.py::test[window-full/aggregations_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] >> test.py::test[select-dict_with_few_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[json-json_query/example--ForceBlocks] [GOOD] >> test.py::test[json-json_query/example--Results] >> test.py::test[produce-process_with_udf-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt-Results] >> test.py::test[schema-select_field-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-row_spec-Results] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables >> test.py::test[pg-join_using_tables1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-Results] >> test.py::test[join-inner_trivial--Results] [GOOD] >> test.py::test[join-left_null_literal-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o--Results] >> test.py::test[join-join_without_correlation_and_struct_access--Results] [GOOD] >> test.py::test[join-left_join_null_column-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] >> test.py::test[join-aggr_diff_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-aggr_diff_order-default.txt-Results] >> test.py::test[blocks-date_add_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval--Results] >> test.py::test[lineage-flatten_by--ForceBlocks] [GOOD] >> test.py::test[lineage-flatten_by--Results] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--ForceBlocks] >> test.py::test[key_filter-is_null_multi_key--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_multi_key--Results] >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_column_fail--ForceBlocks] >> test.py::test[blocks-combine_hashed_some--Results] [GOOD] >> test.py::test[blocks-date_greater--Results] >> test.py::test[blocks-combine_all_count--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] >> test.py::test[blocks-complex_scalars--Results] [GOOD] >> test.py::test[blocks-date_sub--Results] >> test.py::test[join-equi_join_by_expr-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns2-off-ForceBlocks] >> test.py::test[select-create_tuples-default.txt-Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-ForceBlocks] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-ForceBlocks] >> 
test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[select-result_size_limit_with_fill--ForceBlocks] [SKIPPED] >> test.py::test[select-result_size_limit_with_fill--Results] >> test.py::test[order_by-literal_complex--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_complex--Results] >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[aggregate-group_by_expr_mul_col--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] >> test.py::test[ql_filter-integer_eval--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_eval--Results] >> test.py::test[sampling-table_content--Results] [GOOD] >> test.py::test[schema-copy-other-Results] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] >> test.py::test[key_filter-is_null_multi_key--Results] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--ForceBlocks] >> test.py::test[pg-join_using_tables1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-ForceBlocks] >> test.py::test[json-json_query/example--Results] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--ForceBlocks] >> test.py::test[lineage-flatten_by--Results] [GOOD] >> test.py::test[lineage-grouping_sets--ForceBlocks] >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] >> test.py::test[schema-skip_complex_type--Results] [GOOD] >> test.py::test[schema-user_schema_mix1--Results] >> test.py::test[expr-non_persistable_group_by_column_fail--ForceBlocks] [GOOD] |95.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_column_fail--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field--ForceBlocks] >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [GOOD] >> test.py::test[order_by-literal_complex--Results] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--ForceBlocks] >> test.py::test[ql_filter-integer_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_members_eval--ForceBlocks] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[join-three_equalities--Results] [GOOD] >> test.py::test[join-trivial_view--Results] >> test.py::test[produce-reduce_multi_in-sorted-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-Results] >> test.py::test[join-aggr_diff_order-default.txt-Results] [GOOD] >> test.py::test[join-cbo_7tables_only_common_join--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_7tables_only_common_join--Results] [SKIPPED] >> test.py::test[join-from_in_front_join--ForceBlocks] >> test_ydb_backup.py::TestBackupRestoreInRootSchemeOnly::test_table_backup_restore_in_root_scheme_only [GOOD] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--ForceBlocks] >> test.py::test[blocks-date_group_by--ForceBlocks] >> test.py::test[join-star_join_multi-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_multi-off-Results] [SKIPPED] >> test.py::test[join-two_aggrs-default.txt-ForceBlocks] >> test.py::test[join-simple_columns_partial--Results] [GOOD] >> 
test.py::test[join-yql-10654_pullup_with_sys_columns--ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-csv_with_names] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] [GOOD] >> test.py::test[window-full/leadlag--ForceBlocks] >> test.py::test[join-equi_join_three_asterisk_eval--Results] [GOOD] >> test.py::test[join-equi_join_three_simple--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] [SKIPPED] >> test.py::test[action-nested_subquery--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] >> test.py::test[join-premap_no_premap--Results] [GOOD] >> test.py::test[join-premap_no_premap-off-ForceBlocks] >> test.py::test[select-substring_v1-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> test.py::test[hor_join-group_ranges--Results] >> test.py::test[produce-reduce_by_struct-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-sorted-Results] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] >> test.py::test[blocks-date_sub_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[blocks-date_sub_interval_scalar--Results] >> test.py::test[join-flatten_columns2-off-ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns2-off-Results] [SKIPPED] >> test.py::test[schema-user_schema_mix1--Results] [GOOD] >> test.py::test[select-autoextract_source_value-default.txt-Results] >> test.py::test[produce-reduce_multi_in-sorted-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_sampling--ForceBlocks] >> test.py::test[schema-copy-other-Results] [GOOD] >> test.py::test[schema-diffrerent_schemas--Results] >> test.py::test[pg-tpcds-q03-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v2] >> test.py::test[blocks-date_less_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] >> test.py::test[key_filter-part_key_over_dynamic--ForceBlocks] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Results] >> test.py::test[select-substring_v1-default.txt-Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> 
test.py::test[join-mapjoin_early_rewrite_sequence-off-ForceBlocks] >> test.py::test[key_filter-empty_range_over_dynamic--ForceBlocks] [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Results] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-Results] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--ForceBlocks] |95.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-flatten_columns2-off-Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_grouping_hum--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] >> test.py::test[ql_filter-integer_members_eval--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_members_eval--Results] >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_one_field--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_one_field--Results] >> test.py::test[window-full/leadlag_compact--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] >> test.py::test[window-full/syscolumns--ForceBlocks] [GOOD] >> test.py::test[window-full/syscolumns--Results] >> test.py::test[join-from_in_front_join--ForceBlocks] [GOOD] >> test.py::test[join-from_in_front_join--Results] >> test.py::test[lineage-grouping_sets--ForceBlocks] [GOOD] >> test.py::test[lineage-grouping_sets--Results] >> test.py::test[key_filter-part_key_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--ForceBlocks] >> test.py::test[aggregate-group_by_expr_with_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--Results] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_unary--ForceBlocks] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-tzdate--ForceBlocks] >> test.py::test[in-in_types_cast_all-default.txt-Results] [GOOD] >> test.py::test[insert-append--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--ForceBlocks] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] >> test.py::test[ql_filter-integer_members_eval--Results] [GOOD] >> test.py::test[ql_filter-integer_single--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_subst--Results] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Results] >> test.py::test[join-two_aggrs-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-two_aggrs-default.txt-Results] >> test.py::test[join-trivial_view--Results] [GOOD] >> test.py::test[join-yql-12022-off-Results] [SKIPPED] >> test.py::test[json-json_value/example--Results] >> test.py::test[blocks-date_group_by--ForceBlocks] [GOOD] >> test.py::test[blocks-date_group_by--Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--ForceBlocks] >> 
test.py::test[schema-diffrerent_schemas--Results] [GOOD] >> test.py::test[schema-read_schema_change_other--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-csv_with_names-parquet] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] >> test.py::test[flatten_by-flatten_one_field--Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_agg--Results] >> test.py::test[join-from_in_front_join--Results] [GOOD] >> test.py::test[join-full_equal_not_null-off-ForceBlocks] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side--ForceBlocks] >> test.py::test[select-autoextract_source_value-default.txt-Results] [GOOD] >> test.py::test[select-bit_ops-default.txt-Results] >> test.py::test[hor_join-group_ranges--Results] [GOOD] >> test.py::test[hor_join-yield_on-default.txt-Results] >> test.py::test[join-premap_no_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_no_premap-off-Results] [SKIPPED] >> test.py::test[join-pullup_random-off-ForceBlocks] >> test.py::test[window-full/leadlag--ForceBlocks] [GOOD] >> test.py::test[window-full/leadlag--Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_existing_column--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] >> test.py::test[schema-read_schema_change_other--Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] >> test.py::test[produce-reduce_multi_in-sorted-Results] [GOOD] >> test.py::test[produce-reduce_typeinfo--Results] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order--Results] [SKIPPED] >> test.py::test[sampling-bind_expr_udf--Results] >> test.py::test[aggregate-group_by_expr_with_join--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_start--ForceBlocks] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] [GOOD] >> test.py::test[join-yql-14847-off-ForceBlocks] >> test.py::test[select-trivial_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort--ForceBlocks] >> test.py::test[blocks-date_group_by--Results] [GOOD] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] >> test.py::test[produce-reduce_multi_in_sampling--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] >> test.py::test[pg-tpch-q20-default.txt-Results] [GOOD] >> test.py::test[pg-wide_top_sort--Results] >> test.py::test[lineage-grouping_sets--Results] [GOOD] >> test.py::test[lineage-union_all_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate--ForceBlocks] >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[join-equi_join_three_simple--Results] [GOOD] >> test.py::test[join-full_equal_not_null-off-Results] [SKIPPED] >> test.py::test[join-full_join--Results] >> test.py::test[join-two_aggrs-default.txt-Results] [GOOD] >> test.py::test[join-yql-8980-off-ForceBlocks] >> 
test.py::test[blocks-decimal_unary--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_unary--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix1--ForceBlocks] >> test.py::test[ql_filter-integer_single--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single--Results] >> test.py::test[blocks-date_less_scalar--Results] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--ForceBlocks] >> test.py::test[window-full/syscolumns--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-ForceBlocks] >> test.py::test[key_filter-yql-8117-table_key_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] |95.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> test.py::test[insert-append--ForceBlocks] [GOOD] >> test.py::test[insert-append--Results] |95.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-many-ForceBlocks] >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test.py::test[blocks-interval_add_interval--ForceBlocks] >> test.py::test[order_by-order_by_expr_simple--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-less_outs--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-less_outs--Results] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--ForceBlocks] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Results] [SKIPPED] >> test.py::test[in-in_tuple_table-default.txt-ForceBlocks] >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_list] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] >> test.py::test[flatten_by-flatten_one_field_another--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another--Results] >> test.py::test[join-opt_on_opt_side--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single--Results] [GOOD] >> test.py::test[sampling-bind_default-default.txt-ForceBlocks] >> test.py::test[select-bit_ops-default.txt-Results] [GOOD] >> test.py::test[select-calculated_values-default.txt-Results] >> test.py::test[blocks-date_sub--Results] [GOOD] >> test.py::test[blocks-div_uint64--Results] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi--Results] >> test.py::test[produce-reduce_multi_in_sampling--Results] [GOOD] >> test.py::test[ql_filter-integer_bounds--ForceBlocks] >> test.py::test[window-full/leadlag--Results] [GOOD] >> test.py::test[window-full/session_compact--ForceBlocks] >> test.py::test[blocks-date_greater--Results] [GOOD] >> test.py::test[blocks-date_not_equals--Results] >> test.py::test[join-full_equal_not_null-off-ForceBlocks] [GOOD] >> test.py::test[join-full_equal_not_null-off-Results] [SKIPPED] >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Results] >> test.py::test[insert-append--Results] [GOOD] >> 
test.py::test[join-alias_where_group--ForceBlocks] >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Results] >> test.py::test[pg-tpcds-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[join-pullup_random-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_random-off-Results] [SKIPPED] >> test.py::test[join-right_trivial-off-ForceBlocks] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-ForceBlocks] |95.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-opt_on_opt_side--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[order_by-sort_with_take_limit--ForceBlocks] >> test.py::test[json-json_value/example--Results] [GOOD] >> test.py::test[hor_join-yield_on-default.txt-Results] [GOOD] >> test.py::test[key_filter-convert--Results] >> test.py::test[in-in_ansi_join--Results] >> test.py::test[aggregate-group_by_hop_only_start--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_only_start--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_ru_ru--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_subst--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--Results] >> test.py::test[ansi_idents-join_using-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-join_using-default.txt-Results] >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] |95.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[join-full_equal_not_null-off-Results] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate--ForceBlocks] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate--Results] >> test.py::test[key_filter-tzdate--ForceBlocks] [GOOD] >> test.py::test[key_filter-tzdate--Results] >> test.py::test[flatten_by-flatten_one_field_another--Results] [GOOD] >> test.py::test[hor_join-max_in_tables--ForceBlocks] >> test.py::test[join-yql-14847-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14847-off-Results] [SKIPPED] >> test.py::test[join-yql-8131-off-ForceBlocks] [SKIPPED] >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> test.py::test[join-yql-8980--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_continue_mode.py::TestContinueMode::test_continue_from_offsets[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3714800) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:58:30] send response localhost:7356/?database=local ::1 - - [05/May/2025 09:58:30] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow--Results] >> test.py::test[schema-user_schema_mix1--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix1--Results] >> test.py::test[join-yql-8980-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-8980-off-Results] [SKIPPED] >> test.py::test[key_filter-lambda_with_null_filter--ForceBlocks] >> test.py::test[pg-wide_top_sort--Results] [GOOD] >> test.py::test[pragma-config_exec--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:48.436452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:48.436490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:48.436496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:48.436501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:48.436507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:48.436511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:48.436519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:48.436533Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:48.436638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:48.436711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:48.449560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:48.449587Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:48.449667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:48.451775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:48.451830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:48.451857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:48.453076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:48.453146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:48.453273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:48.453321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:48.453964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:48.454247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:48.454262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:48.454307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:48.454315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:48.454322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:48.454348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:48.456507Z node 
1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:48.476639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:48.476715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.476775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:48.476839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:48.476850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.477686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:48.477711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:48.477775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.477784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:48.477789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:48.477794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:48.478340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.478354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:48.478359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:48.478953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.478972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.478978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:48.478985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:48.479594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:48.480120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:48.480157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:48.480347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:48.480373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:48.480380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:48.480436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:48.480443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:48.480476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:48.480488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:48.481062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:48.481074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:48.481120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:48.481126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:48.481138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:48.481144Z node 1 :FLAT_TX_SCHEMESHARD I ... 
chemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.133487Z node 237 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:59:06.133491Z node 237 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2025-05-05T09:59:06.133493Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:59:06.133505Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:59:06.133636Z node 237 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:59:06.133722Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:59:06.133726Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T09:59:06.133730Z node 237 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T09:59:06.134122Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T09:59:06.134150Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T09:59:06.134229Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:59:06.134256Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 1017907251307 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:59:06.134265Z node 237 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T09:59:06.134299Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T09:59:06.134309Z node 237 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:59:06.134313Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:59:06.134319Z node 237 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T09:59:06.134322Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:59:06.134334Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:59:06.134343Z node 237 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:59:06.134349Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T09:59:06.134356Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T09:59:06.134361Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T09:59:06.134365Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T09:59:06.134375Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T09:59:06.134382Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T09:59:06.134386Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T09:59:06.134390Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T09:59:06.135187Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-05-05T09:59:06.135202Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-05T09:59:06.135218Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.135419Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T09:59:06.135441Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T09:59:06.135559Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:59:06.135594Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.135659Z node 237 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:59:06.135666Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:59:06.135712Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T09:59:06.135733Z node 237 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:59:06.135737Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [237:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T09:59:06.135740Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [237:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T09:59:06.135909Z node 237 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.135923Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.135929Z node 237 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:59:06.135933Z node 237 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T09:59:06.135951Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:59:06.136086Z node 237 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.136093Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.136095Z node 237 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T09:59:06.136098Z node 237 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T09:59:06.136101Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T09:59:06.136109Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T09:59:06.136113Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [237:125:2151] 2025-05-05T09:59:06.136132Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:59:06.136135Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T09:59:06.136142Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T09:59:06.136740Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.137037Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T09:59:06.137066Z node 237 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T09:59:06.137079Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T09:59:06.137090Z node 237 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T09:59:06.137095Z node 237 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T09:59:06.137101Z node 237 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T09:59:06.137158Z node 237 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:59:06.137494Z node 237 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T09:59:06.137560Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T09:59:06.137569Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T09:59:06.137645Z node 237 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T09:59:06.137661Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T09:59:06.137666Z node 237 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [237:890:2820] TestWaitNotification: OK eventTxId 1003 >> test.py::test[window-row_number_no_part_from_subq-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Results] [SKIPPED] >> test.py::test[bigdate-table_io-default.txt-ForceBlocks] >> test.py::test[ansi_idents-join_using-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_yt_key_filter--ForceBlocks] [SKIPPED] >> test.py::test[bigdate-tz_table_yt_key_filter--Results] [SKIPPED] >> test.py::test[blocks-add_int16--ForceBlocks] >> test.py::test[select-trivial_where-many-ForceBlocks] [GOOD] >> test.py::test[select-trivial_where-many-Results] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-ForceBlocks] >> test.py::test[in-in_tuple_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Results] >> test.py::test[blocks-interval_add_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_interval--Results] >> test.py::test[optimizers-aggregate_over_aggregate--Results] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--ForceBlocks] >> test.py::test[schema-user_schema_mix1--Results] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-json_each_row] [GOOD] >> test.py::test[blocks-div_uint64--Results] [GOOD] >> test.py::test[blocks-exists--Results] >> test.py::test[schema-user_schema_bind-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] >> test.py::test[join-full_join--Results] [GOOD] >> test.py::test[join-inner_all-off-Results] [SKIPPED] >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test.py::test[join-join_left_cbo--Results] >> test.py::test[sampling-bind_default-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] >> 
test.py::test[select-calculated_values-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] >> test.py::test[ql_filter-integer_bounds--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_bounds--Results] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_4func--ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort--Results] >> test.py::test[key_filter-convert--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] >> test.py::test[join-alias_where_group--ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group--Results] >> test.py::test[select-trivial_where-many-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-ForceBlocks] >> test.py::test[key_filter-tzdate--Results] [GOOD] >> test.py::test[key_filter-yql-14157--ForceBlocks] >> test.py::test[in-in_tuple_table-default.txt-Results] [GOOD] >> test.py::test[in-yql-14677-default.txt-ForceBlocks] >> test.py::test[lambda-lambda_with_tie-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[sampling-map--Results] >> test.py::test[aggregate-group_by_rollup_grouping--ForceBlocks] >> test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-ForceBlocks] >> test.py::test[ql_filter-integer_bounds--Results] [GOOD] >> test.py::test[sampling-bind_expr_udf--ForceBlocks] >> test.py::test[schema-user_schema_mix3--ForceBlocks] >> test.py::test[join-right_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-right_trivial-off-Results] [SKIPPED] >> test.py::test[join-strict_keys--ForceBlocks] >> test.py::test[blocks-interval_add_interval--Results] [GOOD] >> test.py::test[blocks-interval_mul--ForceBlocks] >> test.py::test[order_by-sort_with_take_limit--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take_limit--Results] >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored >> test.py::test[lambda-lambda_with_tie-default.txt-Results] [GOOD] >> test.py::test[lineage-select_field_filter-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[pragma-config_exec--Results] [GOOD] >> test.py::test[produce-process_multi_in_trivial_lambda--Results] [SKIPPED] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] >> test.py::test[join-lookupjoin_inner_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o--Results] >> test.py::test[join-yql-8980--ForceBlocks] [GOOD] >> test.py::test[join-yql-8980--Results] >> test.py::test[pg-tpcds-q25-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test.py::test[hor_join-max_in_tables--ForceBlocks] [GOOD] >> test.py::test[hor_join-max_in_tables--Results] >> test.py::test[blocks-add_int16--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int16--Results] >> test.py::test[order_by-sort_with_take_limit--Results] [GOOD] >> test.py::test[pg-all_data--ForceBlocks] >> 
test.py::test[schema-user_schema_existing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix3--Results] >> test.py::test[key_filter-lambda_with_null_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[join-strict_keys--ForceBlocks] [GOOD] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-multi_to_empty_constraint--ForceBlocks] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Results] >> test.py::test[join-alias_where_group--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] >> test.py::test[blocks-exists--Results] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--Results] >> test.py::test[window-full/session_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/session_compact--Results] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-ForceBlocks] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-csv_with_names] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] >> test.py::test[join-star_join_inners--Results] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-ForceBlocks] >> test.py::test[pg-pg_column_case--Results] >> test.py::test[blocks-add_int16--Results] [GOOD] >> test.py::test[blocks-block_input_sys_columns--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[blocks-combine_all_max--ForceBlocks] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[join-strict_keys--ForceBlocks] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Results] [GOOD] >> test.py::test[optimizers-unused_columns_group--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--ForceBlocks] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[hor_join-max_in_tables--Results] [GOOD] >> test.py::test[sampling-map--Results] [GOOD] >> test.py::test[sampling-reduce_with_presort--Results] [SKIPPED] >> test.py::test[sampling-sample-default.txt-Results] >> test.py::test[bigdate-table_io-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-ForceBlocks] >> test.py::test[in-yql-14677-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-yql-14677-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] >> test.py::test[schema-user_schema_mix3--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix3--Results] >> test.py::test[join-join_left_cbo--Results] [GOOD] >> 
test.py::test[join-left_trivial--Results] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[key_filter-mixed_sort--ForceBlocks] >> test.py::test[sampling-bind_expr_udf--ForceBlocks] [GOOD] >> test.py::test[sampling-bind_expr_udf--Results] >> test.py::test[schema-user_schema_mix3--Results] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] >> test_ydb_backup.py::TestIncompleteBackup::test_incomplete_backup_will_not_be_restored [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--ForceBlocks] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[hor_join-max_in_tables--Results] [GOOD] >> test.py::test[key_filter-yql-14157--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-14157--Results] >> test.py::test[window-win_func_aggr_4func--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] >> test.py::test[window-full/noncompact_with_tablerow--Results] [GOOD] >> test.py::test[window-generic/session--Results] >> test.py::test[join-yql-8980--Results] [GOOD] >> test.py::test[json-jsondocument/insert--ForceBlocks] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-ForceBlocks] >> test.py::test[produce-process_pure_with_sort-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt-Results] >> test.py::test[schema-user_schema_mix3--Results] [GOOD] >> test.py::test[select-calculated_values-default.txt-ForceBlocks] >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] >> test.py::test[insert-from_erasure_to_none--ForceBlocks] >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-generic/session_aliases--ForceBlocks] >> test.py::test[blocks-date_not_equals_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_not_equals_scalar--Results] >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_inner_2o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--Results] >> test.py::test[aggregate-group_by_mul_ru_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] >> test.py::test[blocks-filter_by_column_with_drop--Results] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Results] >> test.py::test[join-mergejoin_saves_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--Results] >> test.py::test[pg-tpcds-q50-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] >> test.py::test[blocks-interval_mul--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul--Results] >> test.py::test[blocks-date_not_equals_scalar--Results] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] >> test.py::test[pg-all_data--ForceBlocks] [GOOD] >> test.py::test[pg-all_data--Results] >> 
test.py::test[join-mergejoin_sorts_output_for_sort_right--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--ForceBlocks] >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[pg-point-default.txt-ForceBlocks] >> test.py::test[select-exists_false-default.txt-Results] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping--Results] >> test.py::test[window-win_func_aggr_4func--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--ForceBlocks] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[blocks-date_not_equals--Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] >> test.py::test[optimizers-unused_columns_group--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_group--Results] >> test.py::test[sampling-sample-default.txt-Results] [GOOD] >> test_statistics.py::TestS3::test_convert[v2-client0-parquet-parquet] [GOOD] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[sampling-bind_expr_udf--Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_filter-default.txt-Results] >> test_statistics.py::TestS3::test_precompute[v1-client0] >> test.py::test[select-dot_name_subrequest-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] >> test.py::test[blocks-combine_all_max--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max--Results] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[key_filter-yql-14157--Results] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] [GOOD] >> test.py::test[select-host_count--Results] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[blocks-interval_mul--Results] [GOOD] >> test.py::test[column_group-hint_append_fail-diff_grp-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[json-jsondocument/insert--ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[produce-reduce_all-default.txt-Results] [GOOD] >> test.py::test[key_filter-mixed_sort--ForceBlocks] [GOOD] >> test.py::test[key_filter-mixed_sort--Results] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] >> test.py::test[schema-select_with_map-sorted_desc-ForceBlocks] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] >> test.py::test[table_range-concat_with_view--Results] >> 
test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_with_view--ForceBlocks] >> test.py::test[select-calculated_values-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-calculated_values-default.txt-Results] >> test.py::test[join-left_trivial--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] >> test.py::test[insert-from_erasure_to_none--ForceBlocks] [GOOD] >> test.py::test[insert-from_erasure_to_none--Results] >> test.py::test[optimizers-unused_columns_group--Results] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--ForceBlocks] >> test.py::test[blocks-filter_direct_col--Results] >> test.py::test[blocks-combine_all_max--Results] [GOOD] >> test.py::test[pg-point-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-point-default.txt-Results] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[pg-all_data--Results] [GOOD] |96.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[select-exists_true-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect-1000-Results] >> test.py::test[lineage-list_literal1-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Results] [SKIPPED] >> test.py::test[lineage-pullup_rename--ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-Results] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[lineage-pullup_rename--Results] [SKIPPED] >> test.py::test[optimizers-simplified_path_constraint--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-simplified_path_constraint--Results] [SKIPPED] >> test.py::test[optimizers-yql-12620_stage_multiuse--ForceBlocks] >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-ForceBlocks] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_scalar--Results] >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--ForceBlocks] >> test.py::test[blocks-date_not_equals_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-ForceBlocks] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[blocks-combine_all_max--Results] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] >> test.py::test[type_v3-append_diff_flags--ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> 
test.py::test[join-lookupjoin_semi_empty-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type--Results] >> test.py::test[key_filter-mixed_sort--Results] [GOOD] >> test.py::test[key_filter-multiusage--ForceBlocks] >> test.py::test[insert-from_erasure_to_none--Results] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] >> test.py::test[select-calculated_values-default.txt-Results] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-ForceBlocks] >> test.py::test[pg-point-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-ForceBlocks] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_in_in_in--ForceBlocks] >> test.py::test[blocks-coalesce_complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] >> test.py::test[pg-tpcds-q69-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-Results] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] [GOOD] >> test.py::test[blocks-interval_div--Results] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--ForceBlocks] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] [GOOD] >> test.py::test[view-file_inner_udf--ForceBlocks] [SKIPPED] >> test.py::test[view-file_inner_udf--Results] [SKIPPED] >> test.py::test[weak_field-weak_field_join--ForceBlocks] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] >> test_statistics.py::TestS3::test_precompute[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_precompute[v2-client0] >> test.py::test[blocks-interval_add_date_scalar--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] >> test.py::test[window-win_func_aggr_4func_sort--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--Results] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_session--ForceBlocks] >> test.py::test[window-generic/session--Results] [GOOD] >> test.py::test[window-leading/aggregations--Results] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin--Results] [SKIPPED] >> test.py::test[pg-tpcds-q69-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-ForceBlocks] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[tpch-q16-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_difftype_assume--Results] [SKIPPED] >> test.py::test[window-generic/session_aliases--ForceBlocks] [GOOD] >> test.py::test[window-generic/session_aliases--Results] >> 
test.py::test[table_range-concat_with_view--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_direct_col--Results] [GOOD] >> test.py::test[blocks-interval_div_scalar--Results] >> test.py::test[pg-tpch-q17-default.txt-Results] >> test.py::test[select-sampleselect-1000-Results] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] >> test.py::test[csee-yql-7237--ForceBlocks] >> test.py::test[key_filter-dependent_value-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--ForceBlocks] >> test.py::test[key_filter-dependent_value-default.txt-Results] >> test.py::test[type_v3-append_diff_flags--ForceBlocks] [GOOD] >> test.py::test[type_v3-append_diff_flags--Results] >> test.py::test[blocks-decimal_op_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_op_decimal--Results] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[select-host_count--Results] [GOOD] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[table_range-concat_with_view--ForceBlocks] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_timestamp[v2] [GOOD] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[produce-reduce_multi_in_difftype_assume--Results] [SKIPPED] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v1] >> test.py::test[join-mapjoin_on_tablerecord-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-ForceBlocks] >> test.py::test[key_filter-multiusage--ForceBlocks] [GOOD] >> test.py::test[key_filter-multiusage--Results] >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--ForceBlocks] >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] >> test.py::test[pg-select_qstarref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] >> test.py::test[optimizers-yql-6038_direct_row--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] >> test.py::test[select-from_in_front_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[blocks-decimal_op_decimal--Results] [GOOD] >> test.py::test[blocks-interval_div--ForceBlocks] >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-ForceBlocks] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[select-host_count--Results] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-ForceBlocks] >> 
test.py::test[join-bush_in_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in--Results] >> test.py::test[action-dep_world_action_quote-default.txt-ForceBlocks] >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--ForceBlocks] >> test.py::test[weak_field-weak_field_join--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join--Results] >> test.py::test[aggregate-having_cast-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> test.py::test[window-win_func_in_lib--ForceBlocks] >> test.py::test[pg-tpcds-q76-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--ForceBlocks] >> test.py::test[blocks-interval_add_interval_scalar--ForceBlocks] >> test.py::test[blocks-interval_div--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--ForceBlocks] >> test.py::test[select-select_all_filtered-default.txt-Results] [GOOD] >> test_statistics.py::TestS3::test_precompute[v2-client0] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] >> test_statistics.py::TestS3::test_sum[v1-client0] >> test.py::test[blocks-interval_div_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] >> test.py::test[pg-select_qstarref2-default.txt-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-ForceBlocks] >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-ForceBlocks] >> test.py::test[tpch-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] >> test.py::test[key_filter-multiusage--Results] [GOOD] >> test.py::test[key_filter-no_bypass_merge--ForceBlocks] >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] >> test.py::test[blocks-combine_all_count_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Results] >> test.py::test[key_filter-no_bypass_merge--ForceBlocks] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left--ForceBlocks] >> test.py::test[key_filter-no_bypass_merge--Results] [SKIPPED] >> test.py::test[key_filter-string_with_legacy--ForceBlocks] >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--ForceBlocks] >> test.py::test[select-append_to_value--Results] >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[order_by-assume_with_filter--ForceBlocks] >> test.py::test[blocks-interval_mul_scalar--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-single-Results] [SKIPPED] >> test.py::test[count-count_all_view_concat--Results] >> test.py::test[aggregate-group_by_session--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session--Results] >> 
test.py::test[join-mapjoin_on_very_complex_type--Results] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[csee-yql-7237--ForceBlocks] [GOOD] >> test.py::test[csee-yql-7237--Results] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] >> test.py::test[blocks-combine_all_count_filter_opt--Results] [GOOD] >> test.py::test[coalesce-coalesce--ForceBlocks] >> test.py::test[hor_join-sorted_out_mix--Results] [SKIPPED] >> test.py::test[hor_join-yql19332_aux_cols--Results] >> test.py::test[aggregate-ensure_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Results] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys--ForceBlocks] >> test.py::test[blocks-interval_div--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_div--Results] >> test.py::test[aggregate-having_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-having_cast-default.txt-Results] >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in_in-off-ForceBlocks] >> test.py::test[blocks-decimal_comparison--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] >> test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] [GOOD] >> test.py::test[select-shift_columns-default.txt-Results] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore >> test.py::test[window-win_func_spec_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_spec_with_part--Results] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--ForceBlocks] >> test.py::test[blocks-interval_add_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] >> test.py::test[aggregate-ensure_count-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--ForceBlocks] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-ForceBlocks] >> test.py::test[key_filter-range_union--ForceBlocks] >> test.py::test[window-win_func_in_lib--ForceBlocks] [GOOD] >> test.py::test[window-win_func_in_lib--Results] >> test.py::test[action-dep_world_action_quote-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] >> test.py::test[count-count_all_view_concat--Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-ForceBlocks] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[tpch-q2-default.txt-ForceBlocks] >> 
test.py::test[select-one_unlabeled_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-Results] >> test.py::test[select-append_to_value--Results] [GOOD] >> test.py::test[select-boolean_where--Results] >> test.py::test[blocks-interval_sub_interval--Results] [GOOD] >> test.py::test[blocks-pg_to_dates--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> test.py::test[in-in_ansi_join--Results] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] >> test.py::test[case-case_multi_val-default.txt-ForceBlocks] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--ForceBlocks] >> test.py::test[blocks-interval_div--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar--ForceBlocks] |96.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[window-leading/aggregations--Results] [GOOD] >> test.py::test[window-rank/opt--Results] >> test.py::test[join-pushdown_filter_over_left--ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] >> test.py::test[blocks-combine_all_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] >> test.py::test[blocks-interval_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-pg_tofrom--ForceBlocks] >> test.py::test[aggregate-having_cast-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_join_condition--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Results] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[csee-yql-7237--Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas--ForceBlocks] >> test.py::test[select-one_unlabeled_column-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-ForceBlocks] >> test.py::test[order_by-assume_with_filter--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_with_filter--Results] >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-ForceBlocks] >> test.py::test[coalesce-coalesce--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] >> test.py::test[window-win_func_in_lib--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] >> test.py::test[key_filter-string_with_legacy--ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with_legacy--Results] >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Results] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] [GOOD] >> test.py::test[join-mergejoin_force_per_link--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names--Results] >> 
test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder--ForceBlocks] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-ForceBlocks] >> test.py::test[select-shift_columns-default.txt-Results] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Results] >> test_ydb_backup.py::TestAlterBackupRestore::test_alter_table_with_data_backup_restore [GOOD] >> test.py::test[order_by-assume_with_filter--Results] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--ForceBlocks] >> test.py::test[join-bush_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-Results] >> test.py::test[coalesce-coalesce--Results] [GOOD] >> test.py::test[column_group-hint_anon-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[column_group-publish-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-publish-perusage-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] >> test.py::test[column_order-align_publish_native--ForceBlocks] >> test.py::test[join-pushdown_filter_over_left--Results] [GOOD] >> test.py::test[join-star_join_mirror--ForceBlocks] >> test.py::test[aggregate-group_by_expr_lookup--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_some_filter--ForceBlocks] >> test.py::test[pg-table_func-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Results] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[window-current/aggregations_leadlag--ForceBlocks] >> test.py::test[pg-table_func-default.txt-Results] >> test.py::test[blocks-pg_to_dates--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] >> test.py::test[select-boolean_where--Results] [GOOD] >> test.py::test[select-deep_udf_call--Results] >> test.py::test[in-in_enum_single1-default.txt-Results] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq--Results] [SKIPPED] >> test.py::test[insert-drop_sortness-calc-Results] >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[limit-zero_limit-default.txt-ForceBlocks] >> test.py::test[insert-multiappend_sorted-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--ForceBlocks] >> test.py::test[case-case_multi_val-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_multi_val-default.txt-Results] >> test.py::test[hor_join-yql19332_aux_cols--Results] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] >> test.py::test[order_by-order_by_tablepath_column--ForceBlocks] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> 
test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-ForceBlocks] >> test.py::test[weak_field-weak_field_type-default.txt-Results] [GOOD] >> test.py::test[window-full/leadlag_compact--ForceBlocks] >> test.py::test[aggregate-group_by_expr_semi_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--Results] >> test.py::test[aggregate-group_by_session_distinct_compact--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] >> test.py::test[blocks-interval_sub_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar--Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range--Results] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_desc--ForceBlocks] >> test.py::test[blocks-pg_tofrom--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] >> test.py::test[dq-blacklisted_pragmas--ForceBlocks] [GOOD] >> test.py::test[dq-blacklisted_pragmas--Results] [SKIPPED] >> test.py::test[dq-join_cbo_native_3_tables--ForceBlocks] >> test.py::test[aggregate-group_by_expr_lookup--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-ForceBlocks] >> test.py::test[case-case_multi_val-default.txt-Results] [GOOD] >> test.py::test[case-case_then_else-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] [GOOD] >> test.py::test[join-mergejoin_force_align3--Results] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-ForceBlocks] [GOOD] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] >> test.py::test[insert_monotonic-truncate_fail--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder--Results] >> test.py::test[tpch-q2-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] >> test.py::test[insert_monotonic-truncate_fail--Results] [GOOD] >> test.py::test[join-inner_with_order-off-ForceBlocks] >> test.py::test[blocks-mul_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-string_with--Results] >> test.py::test[select-table_funcs_spec-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled--Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> test.py::test[blocks-interval_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--ForceBlocks] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[join-mergejoin_force_align3--Results] [SKIPPED] >> test.py::test[blocks-pg_tofrom--Results] [GOOD] >> test.py::test[blocks-tuple_nth--ForceBlocks] >> test.py::test[union_all-union_all_multiple-default.txt-Results] >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] >> 
test.py::test[insert-part_sortness-desc-Results] >> test.py::test[action-eval_folder--Results] [GOOD] >> test.py::test[action-export_action--ForceBlocks] >> test.py::test[order_by-order_by_tablerecord_column--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablerecord_column--Results] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--Results] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--ForceBlocks] >> test.py::test[select-deep_udf_call--Results] [GOOD] >> test.py::test[select-dict_lookup-default.txt-Results] >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] >> test.py::test[pg-tpch-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_semi_join--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_some_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_some_filter--Results] >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-Results] >> test.py::test[limit-zero_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--ForceBlocks] >> test.py::test[column_order-align_publish_native--ForceBlocks] [GOOD] >> test.py::test[column_order-align_publish_native--Results] >> test.py::test[join-star_join_mirror--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> test.py::test[join-mergejoin_big_primary_unique-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry--ForceBlocks] >> test.py::test[join-star_join_mirror--Results] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] >> test.py::test[order_by-order_by_tablepath_column--Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-ForceBlocks] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test.py::test[type_v3-non_strict--ForceBlocks] >> test.py::test[key_filter-range_union--ForceBlocks] [GOOD] >> test.py::test[key_filter-range_union--Results] >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] |96.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part18/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-combine_all_some_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_sum--ForceBlocks] >> test.py::test[dq-join_cbo_native_3_tables--ForceBlocks] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel--ForceBlocks] >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> test.py::test[case-case_then_else-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_then_else-default.txt-Results] >> test.py::test[window-current/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-current/aggregations_leadlag--Results] >> test.py::test[simple_columns-simple_columns_join_fail--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-ForceBlocks] >> test.py::test[table_range-range_over_desc--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_desc--Results] >> test.py::test[join-inner_with_order-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left--ForceBlocks] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[column_order-align_publish_native--Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-ForceBlocks] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--ForceBlocks] >> test.py::test[window-full/leadlag_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/leadlag_compact--Results] >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test_statistics.py::TestS3::test_sum[v1-client0] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--ForceBlocks] >> test.py::test[pg-tpcds-q50-default.txt-Results] >> test.py::test[blocks-string_with--Results] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] >> test.py::test[window-rank/opt--Results] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] >> test_statistics.py::TestS3::test_sum[v2-client0] >> test.py::test[select-dict_lookup-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] >> test.py::test[in-in_with_table_of_tuples-default.txt-Results] [GOOD] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> test.py::test[column_group-hint_dup_col_fail--ForceBlocks] [SKIPPED] >> 
test.py::test[action-export_action--ForceBlocks] [GOOD] >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--Results] [SKIPPED] >> test.py::test[column_order-ordered_plus_native--ForceBlocks] >> test.py::test[action-export_action--Results] >> test.py::test[pg-tpcds-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Results] >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[column_group-hint-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] >> test.py::test[key_filter-range_union--Results] [GOOD] >> test.py::test[key_filter-ranges--ForceBlocks] >> test.py::test[table_range-range_over_desc--Results] [GOOD] >> test.py::test[tpch-q18-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_only--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[schema-user_schema_missing_column--ForceBlocks] >> test.py::test[blocks-tuple_nth--ForceBlocks] [GOOD] >> test.py::test[blocks-tuple_nth--Results] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] >> test.py::test[blocks-block_input-aux_columns-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input-aux_columns-Results] [SKIPPED] >> test.py::test[blocks-boolean_ops--ForceBlocks] >> test.py::test[aggregate-aggregate_udf_nested--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] >> test.py::test[pg-tpcds-q20-default.txt-Results] [GOOD] >> test.py::test[pg-wide_top_sort--ForceBlocks] >> test.py::test[join-mergejoin_choose_primary_with_retry--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] >> test.py::test[join-star_join_mirror--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] |96.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-multi_key-default.txt-Results] >> test.py::test[type_v3-non_strict--ForceBlocks] [GOOD] >> test.py::test[type_v3-non_strict--Results] >> 
test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] >> test.py::test[produce-fuse_reduces_diff_sets--ForceBlocks] >> test.py::test[window-current/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-current/session_aliases--ForceBlocks] >> test.py::test[action-export_action--Results] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-ForceBlocks] |96.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part18/test-results/pytest/{meta.json ... results_accumulator.log} >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v1] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v2] >> test.py::test[blocks-finalize_hashed_keys--Results] [GOOD] >> test.py::test[blocks-pg_call--ForceBlocks] >> test.py::test[window-full/leadlag_compact--Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-Results] >> test.py::test[order_by-order_by_dynum-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Results] >> test.py::test[blocks-combine_all_sum--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] >> test.py::test[order_by-order_by_tablerow_column--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--Results] >> test.py::test[blocks-tuple_nth--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Results] [SKIPPED] >> test.py::test[column_order-union_all_positional_unordered_fail--ForceBlocks] >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_parallel--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel--Results] [SKIPPED] >> test.py::test[dq-wrong_script--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script--Results] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] [GOOD] >> test.py::test[select-exists_with_table-default.txt-Results] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[view-secure_eval--ForceBlocks] >> test.py::test[dq-wrong_script--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_list_on_flatten_by--ForceBlocks] >> test.py::test[count-count_const_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-ForceBlocks] >> test.py::test[view-all_from_view--ForceBlocks] [GOOD] >> test.py::test[view-all_from_view--Results] >> test.py::test[join-left_join_right_pushdown_nested_left--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt-Results] 
[GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--ForceBlocks] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx--Results] [GOOD] >> test.py::test[blocks-minmax_strings_filter--ForceBlocks] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] [GOOD] >> test.py::test[blocks-tuple_type--Results] >> test.py::test[order_by-order_by_tablerow_column--Results] [GOOD] >> test.py::test[params-complex_yson--ForceBlocks] >> test.py::test[column_order-ordered_plus_native--ForceBlocks] [GOOD] >> test.py::test[column_order-ordered_plus_native--Results] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-uuid--Results] >> test.py::test[order_by-order_by_dynum-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf--ForceBlocks] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[view-file_inner_library--ForceBlocks] >> test.py::test[schema-user_schema_missing_column--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_missing_column--Results] >> test.py::test[column_order-union_all_positional_unordered_fail--ForceBlocks] [GOOD] >> test.py::test[column_order-union_all_positional_unordered_fail--Results] [GOOD] >> test.py::test[file-where_key_in_file_content_typed--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--ForceBlocks] >> test.py::test[blocks-combine_all_sum--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--ForceBlocks] >> test.py::test[view-secure_eval--ForceBlocks] [GOOD] >> test.py::test[view-secure_eval--Results] >> test.py::test[key_filter-ranges--ForceBlocks] [GOOD] >> test.py::test[column_group-hint_append_fail-diff_grp-Results] >> test.py::test[key_filter-ranges--Results] >> test.py::test[pg-wide_top_sort--ForceBlocks] [GOOD] >> test.py::test[pg-wide_top_sort--Results] >> test.py::test[view-secure_eval--Results] [GOOD] >> test.py::test[view-trivial_view_concat--ForceBlocks] >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[column_order-select_action-default.txt-Results] >> test.py::test[tpch-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q18-default.txt-Results] >> test.py::test[blocks-boolean_ops--ForceBlocks] [GOOD] >> test.py::test[blocks-boolean_ops--Results] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[action-action_eval_cluster_use--Results] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] [GOOD] >> test.py::test[window-win_extract_members-default.txt-Results] >> test.py::test[join-star_join_semionly_premap--ForceBlocks] [GOOD] >> 
test.py::test[join-star_join_semionly_premap--Results] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-ForceBlocks] >> test.py::test[column_order-ordered_plus_native--Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--ForceBlocks] >> test.py::test[produce-fuse_reduces_diff_sets--ForceBlocks] [GOOD] >> test.py::test[produce-fuse_reduces_diff_sets--Results] >> test.py::test[schema-user_schema_missing_column--Results] [GOOD] >> test.py::test[select-autoextract_source_value-default.txt-ForceBlocks] >> test.py::test[select-exists_with_table-default.txt-Results] [GOOD] >> test.py::test[select-optional_in_job--Results] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] >> test.py::test[pg-tpcds-q69-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-Results] >> test.py::test[blocks-pg_call--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_call--Results] >> test.py::test[key_filter-ranges--Results] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-ForceBlocks] >> test.py::test[union_all-path_and_record-default.txt-Results] >> test.py::test[blocks-boolean_ops--Results] [GOOD] >> test.py::test[blocks-combine_all_some--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[insert-select_operate_with_columns--Results] [GOOD] >> test.py::test[insert-trivial_select-default.txt-Results] >> test.py::test[pg-wide_top_sort--Results] [GOOD] >> test.py::test[produce-process_with_udf_rows-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_tablerow_column--ForceBlocks] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null--Results] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Results] [SKIPPED] >> test.py::test[join-premap_common_left_cross-off-ForceBlocks] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--ForceBlocks] >> test.py::test[pg-tpcds-q63-default.txt-Results] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table >> test.py::test[blocks-tuple_type--Results] [GOOD] >> test.py::test[column_group-hint_append_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4--Results] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_list_on_flatten_by--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] >> test.py::test[join-star_join_semionly_premap--Results] [GOOD] >> 
test.py::test[join-star_join_semionly_premap-off-ForceBlocks] >> test.py::test[params-complex_yson--ForceBlocks] [GOOD] >> test.py::test[params-complex_yson--Results] >> test.py::test[produce-fuse_reduces_diff_sets--Results] [GOOD] >> test.py::test[produce-process_rows_and_filter--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in-empty-ForceBlocks] >> test.py::test[blocks-pg_call--Results] [GOOD] >> test.py::test[blocks-pg_to_interval--ForceBlocks] >> test.py::test[blocks-minmax_strings_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_strings_filter--Results] >> test.py::test[view-file_inner_library--ForceBlocks] [GOOD] >> test.py::test[view-file_inner_library--Results] >> test.py::test[tpch-q18-default.txt-Results] [GOOD] >> test.py::test[tpch-q21-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_udf--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_udf--Results] >> test.py::test[aggregate-group_by_rollup_column_reuse--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[file-where_key_in_file_content_typed--ForceBlocks] [GOOD] >> test.py::test[file-where_key_in_file_content_typed--Results] >> test.py::test[params-complex_yson--Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-ForceBlocks] >> test.py::test[view-trivial_view_concat--ForceBlocks] [GOOD] >> test.py::test[view-trivial_view_concat--Results] >> test.py::test[join-bush_in--ForceBlocks] >> test.py::test[view-file_inner_library--Results] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--ForceBlocks] >> test.py::test[column_order-select_action-default.txt-Results] [GOOD] >> test.py::test[count-count_all-default.txt-Results] >> test.py::test[action-action_eval_cluster_use--Results] [GOOD] >> test.py::test[window-current/session_aliases--ForceBlocks] [GOOD] >> test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[window-current/session_aliases--Results] >> test.py::test[action-eval_on_modif_table_fail--Results] |96.3%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--ForceBlocks] >> test.py::test[library-package--ForceBlocks] [SKIPPED] >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[library-package_override--ForceBlocks] [SKIPPED] >> test.py::test[library-package_override--Results] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--ForceBlocks] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_window_func--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] >> test.py::test[insert-trivial_select-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--Results] >> test.py::test[blocks-minmax_strings_filter--Results] [GOOD] >> test.py::test[blocks-mod_uint64--ForceBlocks] >> test.py::test[order_by-order_by_udf--Results] [GOOD] >> test.py::test[pg-nulls-default.txt-ForceBlocks] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] >> test.py::test[file-where_key_in_file_content_typed--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--ForceBlocks] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf_rows-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[weak_field-weak_field_data--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_asc--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[select-autoextract_source_value-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-autoextract_source_value-default.txt-Results] >> test.py::test[key_filter-is_null--Results] [GOOD] >> test.py::test[key_filter-mixed_sort--Results] >> test.py::test[pg-select_from_columns-default.txt-Results] >> test.py::test[union_all-path_and_record-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_multiin--Results] >> test.py::test[blocks-distinct_opt_state_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[aggregate-group_by_tablerow_column--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column--Results] >> test.py::test[select-optional_in_job--Results] [GOOD] >> test.py::test[select-optional_pull--Results] >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[action-eval_range--Results] >> test.py::test[action-subquery_merge_nested_subquery--ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] >> test.py::test[key_filter-uuid--Results] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> test.py::test[blocks-combine_all_some--ForceBlocks] [GOOD] >> 
test.py::test[blocks-combine_all_some--Results] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_before_current--Results] >> test.py::test[window-win_extract_members-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--ForceBlocks] >> test.py::test[window-win_func_auto_arg-default.txt-Results] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--ForceBlocks] >> test.py::test[blocks-pg_to_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_interval--Results] >> test.py::test[join-star_join_semionly_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-Results] >> test.py::test[insert_monotonic-truncate_fail--Results] [GOOD] >> test.py::test[join-cbo_7tables--Results] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-equi_join_by_expr--Results] >> test.py::test[count-count_all-default.txt-Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] >> test.py::test[join-star_join_semionly_premap-off-Results] [SKIPPED] >> test.py::test[join-yql-12022--ForceBlocks] >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> test_ydb_backup.py::TestPermissionsBackupRestoreSingleTable::test_single_table [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] [GOOD] >> test.py::test[join-premap_common_left_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Results] [SKIPPED] >> test.py::test[like-like_clause_escape-default.txt-ForceBlocks] >> test.py::test[join-premap_common_right_tablecontent-off-ForceBlocks] >> test.py::test[window-current/session_aliases--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--ForceBlocks] >> test.py::test[window-generic/aggregations_after_current--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[expr-inline_call--ForceBlocks] >> test.py::test[pg-tpcds-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Results] >> test.py::test[action-subquery_merge_nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] >> test.py::test[select-autoextract_source_value-default.txt-Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_tablerow_column--Results] [GOOD] >> test.py::test[aggregate-group_by_tz_date--ForceBlocks] >> test.py::test[weak_field-weak_field_infer_scheme--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] |96.3%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--ForceBlocks] >> test.py::test[blocks-pg_to_interval--Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] >> test.py::test[blocks-sort_two_asc--ForceBlocks] >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q48-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_some--Results] [GOOD] >> test.py::test[blocks-combine_hashed_avg--ForceBlocks] >> test.py::test[join-bush_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in--Results] >> test.py::test[blocks-combine_all_pg_filter--ForceBlocks] >> test.py::test[pg-select_from_columns-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-ForceBlocks] >> test.py::test[action-eval_range--Results] [GOOD] >> test.py::test[action-eval_skip_take--Results] >> test.py::test[blocks-mod_uint64--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] >> test.py::test[blocks-mod_uint64--Results] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] >> test.py::test[weak_field-weak_field_data--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_data--Results] >> test.py::test[weak_field-weak_field_infer_scheme--Results] [GOOD] >> test.py::test[window-current/session_extended--ForceBlocks] >> test.py::test[tpch-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q21-default.txt-Results] >> test.py::test[union_all-union_all_multiin--Results] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] >> test.py::test[flatten_by-flatten_by_opt_dict--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] >> test.py::test[pg-tpch-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] >> test_statistics.py::TestS3::test_sum[v2-client0] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--ForceBlocks] >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable--Results] >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] >> test.py::test[window-generic/aggregations_before_current--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[blocks-mod_uint64--Results] [GOOD] >> test.py::test[blocks-pg_to_numbers--ForceBlocks] >> test.py::test[select-optional_pull--Results] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] >> test.py::test[weak_field-weak_field_data--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-ForceBlocks] >> test.py::test[window-current/session_incompat_sort--ForceBlocks] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> 
test.py::test[blocks-filter_by_column_with_drop--ForceBlocks] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] >> test.py::test[expr-inline_call--ForceBlocks] [GOOD] >> test.py::test[expr-inline_call--Results] >> test.py::test[key_filter-mixed_sort--Results] [GOOD] >> test.py::test[library-package_override--Results] [SKIPPED] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] >> test.py::test[like-like_clause_escape-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Results] >> test.py::test[select-corr_name_in_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] >> test.py::test[join-yql-12022--ForceBlocks] [GOOD] >> test.py::test[join-yql-12022--Results] >> test.py::test[table_range-tablepath_with_non_existing--ForceBlocks] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Results] >> test.py::test[pg-nulls-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] >> test.py::test[join-bush_in--Results] [GOOD] >> test.py::test[join-bush_in-off-ForceBlocks] >> test.py::test[join-premap_common_right_tablecontent-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] >> test.py::test[aggregate-group_by_tz_date--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_tz_date--Results] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> test.py::test[pg-tpcds-q48-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q48-default.txt-Results] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_presort--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> test.py::test[expr-inline_call--Results] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-ForceBlocks] >> test.py::test[pg-select_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_limit-default.txt-Results] >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_struct-default.txt-ForceBlocks] >> test.py::test[blocks-sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_asc--Results] >> test.py::test[aggregate-group_by_session_only_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] >> test.py::test[join-equi_join_by_expr--Results] [GOOD] >> test.py::test[join-equi_join_by_expr-off-Results] [SKIPPED] >> test.py::test[join-filter_joined--Results] >> test.py::test[aggr_factory-avg_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_sort_limit--ForceBlocks] [SKIPPED] >> test.py::test[limit-dynamic_sort_limit--Results] [SKIPPED] >> test.py::test[limit-empty_input_after_limit-default.txt-ForceBlocks] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[aggregate-group_by_session_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> test.py::test[tpch-q4-default.txt-ForceBlocks] >> test.py::test[select-corr_name_in_select-default.txt-Results] 
[GOOD] >> test.py::test[select-dict_lookup-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_avg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[action-eval_skip_take--Results] [GOOD] >> test.py::test[action-insert_after_eval--Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] [GOOD] >> test.py::test[aggregate-list_with_fold_map--ForceBlocks] >> test.py::test[pg-tpcds-q48-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-ForceBlocks] >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] >> test.py::test[join-yql-12022--Results] [GOOD] >> test.py::test[join-yql-14829_left--ForceBlocks] >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-ForceBlocks] >> test.py::test[pg-nulls-default.txt-Results] [GOOD] >> test.py::test[pg-pg_column_case--ForceBlocks] |96.3%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] >> test.py::test[pg-select_limit-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q01-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[view-secure_eval--Results] >> test.py::test[blocks-sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] >> test_statistics.py::TestS3::test_aborted_by_user[v1-client0] [GOOD] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] >> test.py::test[select-reuse_named_node-default.txt-Results] [GOOD] >> test.py::test[select-sample_limit_recordindex--Results] >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[produce-reduce_with_python_few_keys--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys--Results] [SKIPPED] >> test.py::test[sampling-bind_small_rate-default.txt-ForceBlocks] >> test.py::test[count-count_nullable--Results] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-Results] >> test.py::test[window-win_func_auto_arg-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] >> test.py::test[blocks-combine_all_pg_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append--Results] >> test.py::test[blocks-pg_to_numbers--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_numbers--Results] >> test.py::test[blocks-filter_by_column_with_drop--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--Results] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[aggregate-group_by_session_only_distinct--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped--ForceBlocks] >> test.py::test[hor_join-double_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-ForceBlocks] >> 
test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] >> test.py::test[window-generic/aggregations_after_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] >> test.py::test[view-secure_eval--Results] [GOOD] >> test.py::test[view-trivial_view--Results] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--ForceBlocks] >> test.py::test[window-current/session_extended--ForceBlocks] [GOOD] >> test.py::test[window-current/session_extended--Results] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-limit-dynamic-Results] >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] >> test.py::test[pg-tpch-q14-default.txt-Results] >> test.py::test[aggregate-group_by_session_distinct--Results] [GOOD] >> test.py::test[aggregate-percentile_and_variance--ForceBlocks] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] >> test.py::test[blocks-filter_by_column_with_drop--Results] [GOOD] >> test.py::test[blocks-filter_direct_col--ForceBlocks] >> test.py::test[expr-tagged_runtime-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Results] >> test.py::test[join-bush_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval-off-ForceBlocks] >> test.py::test[blocks-pg_to_numbers--Results] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-ForceBlocks] >> test.py::test[hor_join-double_input-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1--Results] [SKIPPED] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] |96.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] >> test.py::test[select-dict_lookup-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup-default.txt-Results] >> test.py::test[pg-pg_column_case--ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] >> test.py::test[window-current/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] >> test.py::test[aggregate-list_with_fold_map--ForceBlocks] [GOOD] >> test.py::test[aggregate-list_with_fold_map--Results] >> test.py::test[tpch-q4-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q4-default.txt-Results] >> test.py::test[action-insert_after_eval--Results] [GOOD] >> 
test.py::test[action-parallel_for-default.txt-Results] >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_equals--ForceBlocks] >> test.py::test[type_v3-append_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q01-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q01-default.txt-Results] >> test.py::test[type_v3-append_struct-default.txt-Results] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] >> test.py::test[join-filter_joined--Results] [GOOD] >> test.py::test[join-flatten_columns2-off-Results] [SKIPPED] >> test.py::test[join-from_in_front_join--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:55.530001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:55.530028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:55.530034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:55.530039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:55.530045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:55.530049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:55.530058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:55.530072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:55.530170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:55.530236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-05-05T09:57:55.542580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:55.542601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:55.542710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:55.544585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:55.544616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:55.544653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:55.545622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:55.545665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:55.545782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.545821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:55.546324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.546602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:55.546613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.546681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:55.546688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:55.546694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:55.546719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:55.548161Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:55.568349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:55.568440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.568489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:55.568557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:55.568569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.569177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.569201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:55.569273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.569284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:55.569288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:55.569292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:55.569718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.569730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:55.569735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:55.570186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.570196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.570202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:55.570207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:55.570867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:55.571372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:55.571413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:55.571623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:55.571651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:55.571658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:55.571715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:55.571722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:55.571754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:55.571766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:55.572397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:55.572407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:55.572443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:55.572449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:55.572459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:55.572466Z node 1 :FLAT_TX_SCHEMESHARD I ... 
TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T09:59:49.100710Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T09:59:49.100881Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.100893Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.100896Z node 263 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:59:49.100898Z node 263 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2025-05-05T09:59:49.100901Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T09:59:49.100910Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T09:59:49.101220Z node 263 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T09:59:49.101275Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T09:59:49.101279Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T09:59:49.101283Z node 263 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T09:59:49.101645Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-05T09:59:49.101676Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-05-05T09:59:49.101874Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2025-05-05T09:59:49.101961Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:59:49.101983Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 1129576401004 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:59:49.101991Z node 263 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 
2025-05-05T09:59:49.102020Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T09:59:49.102029Z node 263 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T09:59:49.102033Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:59:49.102038Z node 263 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T09:59:49.102041Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:59:49.102053Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:59:49.102063Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:59:49.102074Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-05T09:59:49.102081Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T09:59:49.102085Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-05T09:59:49.102089Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-05-05T09:59:49.102101Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T09:59:49.102107Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-05T09:59:49.102111Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-05T09:59:49.102114Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T09:59:49.102258Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.102751Z node 263 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:59:49.102765Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:59:49.102809Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T09:59:49.102838Z node 263 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:59:49.102843Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [263:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-05T09:59:49.102849Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [263:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-05T09:59:49.103037Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.103051Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.103056Z node 263 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:59:49.103061Z node 263 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-05T09:59:49.103066Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T09:59:49.103190Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.103202Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.103210Z node 263 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T09:59:49.103215Z node 263 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T09:59:49.103219Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T09:59:49.103233Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-05T09:59:49.103239Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [263:123:2149] 2025-05-05T09:59:49.103304Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T09:59:49.103310Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T09:59:49.103321Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T09:59:49.103661Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.103928Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T09:59:49.103947Z node 263 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-05T09:59:49.103957Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-05-05T09:59:49.103965Z node 263 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T09:59:49.103970Z node 263 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-05T09:59:49.103975Z node 263 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 1004, itemIdx# 4294967295 2025-05-05T09:59:49.104021Z node 263 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T09:59:49.104270Z node 263 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T09:59:49.104318Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T09:59:49.104323Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T09:59:49.104378Z node 263 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T09:59:49.104390Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T09:59:49.104394Z node 263 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [263:985:2919] TestWaitNotification: OK eventTxId 1004 |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[hor_join-fuse_multi_outs2-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[expr-tagged_runtime-default.txt-Results] [GOOD] >> test.py::test[hor_join-yield_off--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-yield_off--Results] [SKIPPED] >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_with_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_left--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> test.py::test[join-yql-14829_left--Results] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] [GOOD] >> test.py::test[in-in_compact_distinct--ForceBlocks] |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[pg-pg_column_case--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] >> test.py::test[select-dict_lookup-default.txt-Results] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-ForceBlocks] [SKIPPED] |96.4%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[select-optional_as_warn-default.txt-Results] [SKIPPED] >> test.py::test[select-result_rows_limit--ForceBlocks] [SKIPPED] >> test.py::test[select-result_rows_limit--Results] [SKIPPED] >> test.py::test[select-tablepathprefix-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] [GOOD] >> test.py::test[produce-reduce_with_flat_python_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_flat_python_stream--Results] [SKIPPED] >> test.py::test[sampling-bind_multiple_sample-default.txt-ForceBlocks] >> test.py::test[sampling-bind_small_rate-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] >> test.py::test[pg-tpcds-q01-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-ForceBlocks] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] >> test.py::test[multicluster-basic-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-basic-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group_one_of_multi--ForceBlocks] >> test.py::test[window-current/session_extended--Results] [GOOD] >> test.py::test[window-full/aggregations--ForceBlocks] >> test.py::test[select-sample_limit_recordindex--Results] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] >> test.py::test[pg-tpch-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] >> test.py::test[view-trivial_view--Results] [GOOD] >> test.py::test[view-view_with_lambda_process--Results] >> test.py::test[window-win_func_first_last_rev--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Results] >> test.py::test[type_v3-append_struct-default.txt-Results] [GOOD] >> test.py::test[type_v3-singulars--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-singulars--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script--ForceBlocks] >> test.py::test[epochs-reset_sortness_on_append--Results] [GOOD] >> test.py::test[file-file_list_simple--Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_datetime[v2] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-ForceBlocks] >> test.py::test[limit-limit-dynamic-Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] >> test.py::test[tpch-q4-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] >> test.py::test[aggregate-list_with_fold_map--Results] [GOOD] >> test.py::test[blocks-add_decimal--ForceBlocks] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v1] >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[column_group-hint_empty_grp_fail--ForceBlocks] >> test.py::test[blocks-distinct_mixed_all--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] >> test.py::test[aggregate-library_error_in_aggregation_fail--ForceBlocks] >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-ForceBlocks] >> 
test.py::test[column_group-hint_empty_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail--Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[blocks-filter_direct_col--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_direct_col--Results] >> test.py::test[aggregate-percentiles_grouped--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_grouped--Results] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] >> test.py::test[case-case_val_when_then-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-Results] >> test.py::test[aggr_factory-booland-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Results] >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 09:58:39] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 09:58:41] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 09:58:42] "GET /nested_library.sql.txt HTTP/1.1" 200 - |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_and_variance--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval-off-Results] [SKIPPED] >> test.py::test[join-full_equal_not_null--ForceBlocks] >> test.py::test[aggregate-percentile_and_variance--Results] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] >> test.py::test[pg-tpch-q15-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_in--ForceBlocks] >> test.py::test[aggregate-library_error_in_aggregation_fail--ForceBlocks] [GOOD] >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt-ForceBlocks] >> test.py::test[produce-process_multi_in--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[produce-reduce_lambda--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--ForceBlocks] >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[blocks-filter_direct_col--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--ForceBlocks] >> test.py::test[in-in_compact_distinct--ForceBlocks] [GOOD] >> test.py::test[in-in_compact_distinct--Results] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] >> test.py::test[action-parallel_for-default.txt-Results] [GOOD] 
>> test.py::test[action-pending_arg_fail--Results] >> test.py::test[join-yql-14829_left--Results] [GOOD] >> test.py::test[join-yql_465--ForceBlocks] >> test.py::test[blocks-distinct_mixed_all--Results] [GOOD] >> test.py::test[blocks-exists--ForceBlocks] >> test.py::test[join-from_in_front_join--Results] [GOOD] >> test.py::test[join-from_in_front_join-off-Results] [SKIPPED] >> test.py::test[join-full_join-off-Results] [SKIPPED] >> test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test.py::test[join-group_compact_by--Results] >> test.py::test[pg-tpcds-q27-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-Results] >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-tablepathprefix-default.txt-Results] >> test.py::test[view-view_with_lambda_process--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] >> test.py::test[weak_field-weak_field--Results] >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] [GOOD] >> test.py::test[udf-named_args_for_script--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script--Results] >> test.py::test[blocks-add_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-add_decimal--Results] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] >> test.py::test[file-file_list_simple--Results] [GOOD] >> test.py::test[file-where_key_in_get_file_content--Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] >> test.py::test[select-where_in-default.txt-Results] >> test.py::test[aggr_factory-booland-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-ForceBlocks] |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[aggregate-rollup_with_dict--ForceBlocks] >> test.py::test[pg-tpcds-q27-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-ForceBlocks] >> test.py::test[sampling-bind_multiple_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] [GOOD] >> test.py::test[select-struct_access_without_table_name--Results] >> test.py::test[action-pending_arg_fail--Results] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] >> test.py::test[select-tablepathprefix-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled_1000--ForceBlocks] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> test.py::test[order_by-native_desc_sort-over_sorted-ForceBlocks] [SKIPPED] >> 
test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[pg-join_using_multiple2--ForceBlocks] >> test.py::test[in-in_compact_distinct--Results] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt-Results] [GOOD] >> test.py::test[insert-override-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-override-with_view-Results] [SKIPPED] >> test.py::test[lineage-error_type--Results] [SKIPPED] >> test.py::test[lineage-pullup_rename--Results] [SKIPPED] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-ForceBlocks] >> test.py::test[lineage-scalar_context--Results] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-topsort-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> test.py::test[order_by-literal--Results] >> test.py::test[udf-named_args_for_script--Results] [GOOD] >> test.py::test[union-union_trivial-default.txt-ForceBlocks] >> test.py::test[aggregate-percentiles_grouped--Results] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-wo_compat-Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-ForceBlocks] >> test.py::test[join-full_trivial--ForceBlocks] >> test.py::test[blocks-add_decimal--Results] [GOOD] >> test.py::test[blocks-block_input_various_types_2--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2--Results] [SKIPPED] >> test.py::test[blocks-combine_all_count--ForceBlocks] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[limit-limit--Results] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] >> test.py::test[window-row_number_to_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-Results] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] [SKIPPED] >> test.py::test[optimizers-sort_by_nonstrict_const--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sort_by_nonstrict_const--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[binding-bind_select-default.txt-Results] >> test.py::test[window-win_func_first_last_with_part--Results] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-ForceBlocks] >> test.py::test[join-full_equal_not_null--ForceBlocks] [GOOD] >> test.py::test[join-full_equal_not_null--Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--ForceBlocks] >> test.py::test[window-full/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations--Results] >> test.py::test[window-win_by_all_avg_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_mul_col--Results] [GOOD] >> 
test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] >> test.py::test[join-yql_465--ForceBlocks] [GOOD] >> test.py::test[join-yql_465--Results] >> test.py::test[blocks-exists--ForceBlocks] [GOOD] >> test.py::test[blocks-exists--Results] >> test.py::test[file-where_key_in_get_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] >> test.py::test[select-where_in-default.txt-Results] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[blocks-date_equals--ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals--Results] >> test.py::test[optimizers-yt_shuffle_by_keys--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> test.py::test[order_by-SortByOneFieldDesc--ForceBlocks] >> test.py::test[bigdate-table_common_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt-Results] >> test_ydb_backup.py::TestPermissionsBackupRestoreFolderWithTable::test_folder_with_table [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[weak_field-weak_field--Results] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] >> test.py::test[join-star_join--ForceBlocks] >> test.py::test[optimizers-keepworld_emptyflatmap--Results] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-full_equal_not_null--Results] [GOOD] >> test.py::test[join-full_join--ForceBlocks] >> test.py::test[produce-reduce_multi_in--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in--Results] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] >> test.py::test[order_by-literal--Results] [GOOD] >> test.py::test[order_by-native_desc_assume_with_transform--Results] [SKIPPED] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] >> test.py::test[order_by-native_desc_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> test.py::test[binding-drop_binding--Results] >> test.py::test[limit-limit--Results] [GOOD] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-basic-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] >> test.py::test[select-struct_access_without_table_name--Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] >> test.py::test[blocks-exists--Results] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--ForceBlocks] >> test.py::test[select-unlabeled_1000--ForceBlocks] [GOOD] >> test.py::test[select-unlabeled_1000--Results] >> test.py::test[join-yql_465--Results] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-ForceBlocks] >> test.py::test[window-win_func_first_last--ForceBlocks] >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-ForceBlocks] >> 
test.py::test[aggregate-rollup_with_dict--ForceBlocks] [GOOD] >> test.py::test[aggregate-rollup_with_dict--Results] >> test.py::test[union-union_trivial-default.txt-ForceBlocks] [GOOD] >> test.py::test[union-union_trivial-default.txt-Results] >> test.py::test[pg-join_using_multiple2--ForceBlocks] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] >> test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_with_order--Results] >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v1] [GOOD] >> test.py::test[join-full_trivial--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial--Results] >> test.py::test[blocks-combine_all_count--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count--Results] >> test.py::test[window-win_func_aggr_hist--ForceBlocks] >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v2] |96.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] >> test.py::test[blocks-interval_mul_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] >> test.py::test[aggr_factory-linear_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] >> test.py::test[window-row_number_to_map-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-ForceBlocks] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_assume--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_assume--Results] [SKIPPED] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] >> test.py::test[sampling-join_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] >> test.py::test[select-unlabeled_1000--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-ForceBlocks] >> test.py::test[blocks-pg_to_strings--Results] >> test.py::test[produce-reduce_multi_in--Results] [GOOD] >> test.py::test[produce-reduce_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] >> test.py::test[udf-named_args_for_script_with_posargs--ForceBlocks] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] 
[GOOD] >> test.py::test[blocks-add_int64--ForceBlocks] >> test.py::test[window-full/aggregations--Results] [GOOD] >> test.py::test[window-full/session--ForceBlocks] >> test.py::test[blocks-combine_all_count--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--ForceBlocks] >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-ForceBlocks] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] >> test.py::test[aggregate-rollup_with_dict--Results] [GOOD] >> test.py::test[blocks-add_uint64--ForceBlocks] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[optimizers-yql-17413-topsort--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--Results] >> test.py::test[order_by-SortByOneFieldDesc--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--Results] >> test.py::test[optimizers-unused_columns_window--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window--Results] |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[insert-replace_ordered_by_key_desc-default.txt-Results] [GOOD] |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] >> test.py::test[join-full_trivial--Results] [GOOD] >> test.py::test[join-full_trivial-off-ForceBlocks] >> test.py::test[bigdate-table_common_type-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-wo_compat-Results] [SKIPPED] >> test.py::test[blocks-combine_all_pg--ForceBlocks] >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out--Results] >> test.py::test[blocks-interval_mul_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--ForceBlocks] >> test.py::test[blocks-mul_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Results] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-ForceBlocks] >> test.py::test[produce-process_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Results] [SKIPPED] >> test.py::test[produce-process_with_assume--Results] >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-ForceBlocks] >> test.py::test[join-full_join--ForceBlocks] [GOOD] >> test.py::test[join-full_join--Results] >> test.py::test[pg-join_using_multiple2--Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] >> test.py::test[limit-insert_with_limit-dynamic-ForceBlocks] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-many-Results] >> test.py::test[limit-insert_with_limit-dynamic-Results] >> test.py::test[blocks-date_equals--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--ForceBlocks] >> test.py::test[pg-tpcds-q56-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> 
test.py::test[order_by-SortByOneFieldDesc--Results] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-ForceBlocks] >> test.py::test[window-win_func_first_last--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last--Results] >> test.py::test[join-star_join--ForceBlocks] [GOOD] >> test.py::test[join-star_join--Results] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_opt-default.txt-Results] >> test.py::test[blocks-mul_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-not_opt--ForceBlocks] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[window-rank/unordered--Results] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] >> test.py::test[optimizers-unused_columns_window--Results] [GOOD] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--ForceBlocks] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Results] [SKIPPED] >> test.py::test[order_by-assume_with_transform_desc--ForceBlocks] >> test.py::test[limit-insert_with_limit-dynamic-Results] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-17413-topsort--Results] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-ForceBlocks] >> test.py::test[window-row_number_to_map_noncompact-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] >> test.py::test[join-inner_with_order--Results] [GOOD] >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[blocks-add_int64--ForceBlocks] [GOOD] >> test.py::test[blocks-add_int64--Results] |96.5%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part12/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-block_input_various_types--Results] [SKIPPED] >> test.py::test[blocks-coalesce_ints--Results] >> test.py::test[udf-named_args_for_script_with_posargs--ForceBlocks] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs--Results] >> test.py::test[join-full_join--Results] [GOOD] >> test.py::test[join-join_comp_map_table--ForceBlocks] >> test.py::test[blocks-add_uint64--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64--Results] >> test.py::test[union_all-mix_map_and_read-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] >> test.py::test[blocks-combine_all_sum_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--Results] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-ForceBlocks] >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[blocks-pg_to_strings--Results] [GOOD] >> test.py::test[blocks-sub_uint64_opt2--Results] |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] >> test.py::test[window-win_func_aggr_hist--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> test.py::test[join-left_all--Results] >> test.py::test[produce-process_with_assume--Results] [GOOD] >> test.py::test[produce-process_with_lambda-default.txt-Results] >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] >> test.py::test[join-full_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial-off-Results] [SKIPPED] >> test.py::test[join-group_compact_by--ForceBlocks] >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] >> test.py::test[udf-named_args_for_script_with_posargs--Results] [GOOD] >> test.py::test[udf-two_regexps--ForceBlocks] >> test.py::test[select-trivial_where-many-Results] [GOOD] >> test.py::test[select-trivial_where-one-Results] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-sorted_sql_in--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_disposition.py::TestContinueMode::test_disposition_fresh[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3724859) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [05/May/2025 09:58:24] send response localhost:63133/?database=local ::1 - - [05/May/2025 09:58:24] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[blocks-add_uint64--Results] [GOOD] >> test.py::test[blocks-combine_hashed_max--ForceBlocks] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] >> test.py::test[sampling-bind_join_left-default.txt-Results] [GOOD] >> test.py::test[sampling-read-dynamic-ForceBlocks] >> test.py::test[aggr_factory-variance-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--ForceBlocks] >> test.py::test[pg-join_using_tables4-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-Results] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] >> test.py::test[blocks-combine_all_sum_filter--Results] [GOOD] >> test.py::test[blocks-compare--ForceBlocks] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-ForceBlocks] >> test.py::test[join-star_join--Results] [GOOD] >> test.py::test[join-star_join_multi--ForceBlocks] >> test.py::test[ql_filter-integer_optional--ForceBlocks] >> test.py::test[blocks-not_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-not_opt--Results] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--Results] >> test.py::test[in-in_compact_distinct-empty-Results] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> test.py::test[blocks-member--ForceBlocks] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_pg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_pg--Results] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [GOOD] >> test.py::test[expr-constraints_of--ForceBlocks] >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_first_last_rev--ForceBlocks] >> test.py::test[order_by-assume_with_transform_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--Results] >> test.py::test[pg-tpcds-q74-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-Results] >> test.py::test[limit-many_top_sorts-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[limit-many_top_sorts-default.txt-Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_string[v2] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v1] >> test.py::test[blocks-not_opt--Results] [GOOD] >> test.py::test[blocks-string_pass--ForceBlocks] >> test.py::test[blocks-sub_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-tuple_nth--Results] >> test.py::test[blocks-coalesce_ints--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_part-ForceBlocks] >> test.py::test[produce-process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] >> test.py::test[window-full/session--ForceBlocks] [GOOD] >> test.py::test[window-full/session--Results] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] |96.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part12/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[order_by-assume_with_transform_desc--Results] [GOOD] >> test.py::test[pg-tpcds-q74-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-ForceBlocks] >> test.py::test[pg-join_using_tables4-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref2-default.txt-ForceBlocks] >> test.py::test[select-trivial_where-one-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-Results] >> test.py::test[order_by-literal--ForceBlocks] >> test.py::test[join-group_compact_by--ForceBlocks] [GOOD] >> test.py::test[join-group_compact_by--Results] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] >> test.py::test[limit-many_top_sorts-default.txt-Results] [GOOD] >> test.py::test[limit-sort_calc_limit--ForceBlocks] >> test.py::test[blocks-sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] >> test.py::test[blocks-combine_hashed_max--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] >> test.py::test[udf-two_regexps--ForceBlocks] [GOOD] >> test.py::test[udf-two_regexps--Results] >> test.py::test[window-rank/unordered--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] >> test.py::test[optimizers-yql-9297_publish_ytcopy--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] >> test.py::test[join-left_all--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_optional--Results] >> test.py::test[ql_filter-integer_optional--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_optional--Results] >> test.py::test[sampling-read-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-read-dynamic-Results] >> test.py::test[window-win_func_rank_by_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_all--Results] >> 
test.py::test[in-in_compact_distinct-empty-Results] [GOOD] >> test.py::test[in-in_noansi_join--Results] >> test.py::test[union_all-path_and_record-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-Results] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_like--ForceBlocks] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-compare--ForceBlocks] [GOOD] >> test.py::test[blocks-compare--Results] >> test.py::test[expr-constraints_of--ForceBlocks] [GOOD] >> test.py::test[expr-constraints_of--Results] >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[join-join_comp_map_table--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_map_table--Results] >> test.py::test[blocks-combine_all_pg--Results] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--ForceBlocks] >> test.py::test[blocks-member--ForceBlocks] [GOOD] >> test.py::test[blocks-member--Results] >> test.py::test[udf-two_regexps--Results] [GOOD] >> test.py::test[udf-udaf--ForceBlocks] >> test.py::test[dq-dq_replicate_ok-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_optional--Results] [GOOD] >> test.py::test[result_types-singular-default.txt-ForceBlocks] >> test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_all--ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] >> test.py::test[blocks-combine_hashed_max--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys--ForceBlocks] |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[optimizers-unused_columns_group--Results] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] >> test.py::test[produce-reduce_multi_in--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[pg-tpcds-q92-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-read-dynamic-Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] >> test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[expr-empty_iterator--ForceBlocks] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--ForceBlocks] [SKIPPED] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] [SKIPPED] >> test.py::test[action-eval_pragma--ForceBlocks] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] [GOOD] >> test.py::test[order_by-order_by_dot_column-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q92-default.txt-Results] >> test.py::test[window-win_func_first_last_rev--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] >> test.py::test[schema-select_all-row_spec_part-ForceBlocks] [GOOD] >> 
test.py::test[schema-select_all-row_spec_part-Results] >> test.py::test[blocks-string_pass--ForceBlocks] [GOOD] >> test.py::test[blocks-string_pass--Results] >> test.py::test[union_all-path_and_record-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-ForceBlocks] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] >> test.py::test[window-win_func_rank_by_all--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-ForceBlocks] >> test.py::test[join-star_join_multi--ForceBlocks] [GOOD] >> test.py::test[join-star_join_multi--Results] >> test.py::test[order_by-literal--ForceBlocks] [GOOD] >> test.py::test[blocks-compare--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--ForceBlocks] >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[order_by-literal--Results] >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing >> test.py::test[blocks-tuple_nth--Results] [GOOD] >> test.py::test[column_group-hint-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_str_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-select_limit_offset-default.txt-Results] >> test.py::test[pg-select_starref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Results] >> test.py::test[pg-tpcds-q92-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-ForceBlocks] |96.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Results] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-ForceBlocks] >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] [GOOD] >> test.py::test[blocks-string_pass--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo--Results] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> test.py::test[order_by-literal--Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--ForceBlocks] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_common_type_unionall--Results] >> test.py::test[window-win_func_first_last_rev--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--ForceBlocks] >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_map_table--Results] [GOOD] >> test.py::test[join-join_comp_map_table-off-ForceBlocks] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> 
test.py::test[blocks-string_pass--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> test_statistics.py::TestS3::test_aborted_by_user[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3698843) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/test_statistics.py.TestS3.test_egress.v1-client0-json_list/default/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/tools/fq_runner/kikimr_runner.py:184: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000cd2/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_statistics/testing_out_stuff/test_statistics.py.TestS3.test_egress.v1-client0-json_list/cp/node_1/metering.bill' mode='r' encoding='utf-8'> meterings_loaded = sum(1 for _ in open(bill_fname)) ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3701706 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[table_range-range_over_like--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_like--Results] >> test.py::test[pg-select_starref2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q33-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test.py::test[hor_join-double_input-default.txt-Results] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_optional--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_take_skip--Results] [SKIPPED] >> 
test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[limit-sort_calc_limit--ForceBlocks] [GOOD] >> test.py::test[action-eval_pragma--ForceBlocks] [GOOD] >> test.py::test[action-eval_pragma--Results] >> test.py::test[limit-sort_calc_limit--Results] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] >> test.py::test[dq-dq_replicate_ok-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[result_types-singular-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-singular-default.txt-Results] >> test.py::test[expr-as_table_emptylist--ForceBlocks] >> test.py::test[order_by-order_by_dot_column-default.txt-ForceBlocks] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v1] [GOOD] >> test.py::test[join-inner_all--ForceBlocks] [GOOD] >> test.py::test[join-inner_all--Results] >> test.py::test[udf-udaf--ForceBlocks] [GOOD] >> test.py::test[udf-udaf--Results] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v2] >> test.py::test[table_range-range_over_like--Results] [GOOD] >> test.py::test[expr-empty_iterator--ForceBlocks] [GOOD] >> test.py::test[expr-empty_iterator--Results] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--Results] [SKIPPED] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[union_all-union_all_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_pragma--Results] [GOOD] >> test.py::test[action-eval_range--ForceBlocks] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[table_range-range_over_like--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] >> test.py::test[result_types-singular-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-ForceBlocks] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[order_by-assume_with_transform_desc--Results] [SKIPPED] >> test.py::test[schema-user_schema_bind-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt-Results] >> test.py::test[aggregate-group_by_ru_with_select_distinct--Results] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-ForceBlocks] >> test_ydb_backup.py::TestPermissionsBackupRestoreDontOverwriteOnAlreadyExisting::test_dont_overwrite_on_already_existing [GOOD] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-list_literal4-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal4-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--ForceBlocks] >> 
test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt-Results] [GOOD] >> test.py::test[column_order-select_orderby-default.txt-Results] >> test.py::test[blocks-distinct_pure_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[window-win_multiaggr-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] >> test.py::test[blocks-date_add_interval_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_add_interval_scalar--Results] >> test.py::test[pg-tpcds-q46-default.txt-ForceBlocks] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[union_all-union_all_multiple-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_uint64--ForceBlocks] >> test.py::test[order_by-order_by_num_key_and_subkey--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] >> test.py::test[join-inner_all--Results] [GOOD] >> test.py::test[join-join_comp_common_table--ForceBlocks] >> test_row_dispatcher.py::TestPqRowDispatcher::test_filters_optional_field [GOOD] >> test.py::test[join-star_join_multi--Results] [GOOD] >> test.py::test[join-star_join_with_diff_complex_key--ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_with_diff_complex_key--Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-ForceBlocks] >> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD] >> test.py::test[schema-def_values--ForceBlocks] >> test.py::test[udf-udaf--Results] [GOOD] >> test.py::test[view-system_udf--ForceBlocks] >> test.py::test[schema-user_schema_bind-default.txt-Results] [GOOD] >> test.py::test[select-braces-default.txt-ForceBlocks] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] >> test.py::test[pg-tpcds-q33-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q33-default.txt-Results] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] |96.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] >> test.py::test[expr-as_table_emptylist--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-ForceBlocks] >> test.py::test[expr-as_table_emptylist--Results] >> test.py::test[window-win_func_over_group_by--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> 
test.py::test[schema-read_schema_other--Results] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-complex-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q07-default.txt-Results] >> test.py::test[window-win_func_lead_lag_opt--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] >> test.py::test[type_v3-append_diff_layout2--Results] >> test.py::test[blocks-date_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-if--ForceBlocks] >> test.py::test[type_v3-append_diff_layout2--Results] [SKIPPED] >> test.py::test[type_v3-replace_diff_layout--ForceBlocks] >> test.py::test[pg-tpcds-q33-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-ForceBlocks] >> test.py::test[action-eval_range--ForceBlocks] [GOOD] >> test.py::test[action-eval_range--Results] >> test.py::test[expr-as_table_emptylist--Results] [GOOD] >> test.py::test[hor_join-table_record--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:54.687903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:54.687926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:54.687932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:54.687938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:54.687962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:54.687967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:54.687976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:54.687990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:54.688102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:54.688176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:54.700554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:54.700574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:54.700661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:54.702567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:54.702606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:54.702628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:54.703695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:54.703744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:54.703866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.703921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:54.704402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.704713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:54.704727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.704774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:54.704783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:54.704788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:54.704812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:54.706181Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 
2025-05-05T09:57:54.727085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:54.727149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.727201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:54.727270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:54.727282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.727865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.727886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:54.727936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.727944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:54.727949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:54.727954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:54.728303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.728315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:54.728319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:54.728741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.728751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.728756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:54.728762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:54.729397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:54.729797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:54.729826Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:54.730001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:54.730023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:54.730030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:54.730083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:54.730090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:54.730120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:54.730130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:54.730482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:54.730490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:54.730516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:54.730521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:54.730530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:54.730535Z node 1 :FLAT_TX_SCHEMESHARD I ... 
AT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T10:00:14.195259Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T10:00:14.195334Z node 338 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.195342Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.195345Z node 338 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T10:00:14.195347Z node 338 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2025-05-05T10:00:14.195350Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T10:00:14.195357Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T10:00:14.195648Z node 338 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T10:00:14.195665Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-05-05T10:00:14.195683Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-05-05T10:00:14.195717Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-05-05T10:00:14.195719Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-05-05T10:00:14.195723Z node 338 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2025-05-05T10:00:14.195816Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T10:00:14.195837Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 1451698948203 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T10:00:14.195845Z node 338 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2025-05-05T10:00:14.195872Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-05-05T10:00:14.195882Z 
node 338 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T10:00:14.195886Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T10:00:14.195892Z node 338 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-05-05T10:00:14.195895Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T10:00:14.195903Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T10:00:14.195911Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T10:00:14.195917Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-05-05T10:00:14.195926Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-05-05T10:00:14.195930Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-05-05T10:00:14.195934Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-05-05T10:00:14.195943Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-05-05T10:00:14.195949Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-05-05T10:00:14.195953Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-05-05T10:00:14.195957Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-05-05T10:00:14.196277Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.196292Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.196552Z node 338 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T10:00:14.196559Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T10:00:14.196589Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-05-05T10:00:14.196605Z node 338 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T10:00:14.196608Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [338:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-05-05T10:00:14.196611Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [338:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-05-05T10:00:14.196713Z node 338 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.196720Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.196723Z node 338 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T10:00:14.196726Z node 338 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-05-05T10:00:14.196729Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-05-05T10:00:14.196822Z node 338 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.196830Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.196849Z node 338 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-05-05T10:00:14.196852Z node 338 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-05-05T10:00:14.196854Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-05-05T10:00:14.196862Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-05-05T10:00:14.196865Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [338:125:2151] 2025-05-05T10:00:14.196978Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T10:00:14.196986Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T10:00:14.196996Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T10:00:14.197338Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.197736Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-05-05T10:00:14.197776Z node 338 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-05-05T10:00:14.197789Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-05-05T10:00:14.197799Z node 338 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-05-05T10:00:14.197803Z node 338 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-05-05T10:00:14.197809Z node 338 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 1004, itemIdx# 4294967295 2025-05-05T10:00:14.197882Z node 338 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T10:00:14.198261Z node 338 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T10:00:14.198326Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T10:00:14.198334Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T10:00:14.198416Z node 338 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T10:00:14.198436Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T10:00:14.198440Z node 338 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [338:996:2930] TestWaitNotification: OK eventTxId 1004 >> test.py::test[insert-values_subquery--ForceBlocks] [SKIPPED] >> test.py::test[insert-values_subquery--Results] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail--Results] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Results] [SKIPPED] >> test.py::test[join-filter_joined--ForceBlocks] >> test.py::test[column_order-select_orderby-default.txt-Results] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] >> test.py::test[pg-tpcds-q46-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt-Results] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--ForceBlocks] >> test.py::test[aggregate-table_funcs_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] >> test.py::test[sampling-bind_topsort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_topsort-default.txt-Results] >> test.py::test[blocks-date_greater_or_equal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] >> test.py::test[join-bush_dis_in_in_in--Results] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> test.py::test[optimizers-yql-7767_key_filter_with_view--ForceBlocks] >> test.py::test[ql_filter-integer_escaping--Results] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] [SKIPPED] >> test.py::test[sampling-subquery_limit-default.txt-Results] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-out_table_record-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] >> test.py::test[schema-def_values--ForceBlocks] [GOOD] >> test.py::test[schema-def_values--Results] >> 
test.py::test[view-system_udf--ForceBlocks] [GOOD] >> test.py::test[view-system_udf--Results] >> test.py::test[join-join_comp_map_table-off-ForceBlocks] [GOOD] >> test.py::test[join-join_comp_map_table-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-ForceBlocks] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--ForceBlocks] [GOOD] >> test.py::test[action-eval_range--Results] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] >> test.py::test[action-insert_after_eval_xlock--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join--Results] >> test.py::test[select-braces-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-braces-default.txt-Results] |96.6%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part17/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[file-parse_file_in_select_as_uint64--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_uint64--Results] >> test.py::test[pg-tpcds-q46-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD] >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-Results] [SKIPPED] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[udf-two_regexps--Results] >> test.py::test[sampling-bind_topsort-default.txt-Results] [GOOD] >> test.py::test[sampling-map-keyfilter-ForceBlocks] >> test.py::test[aggregate-table_funcs_group_by-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_binding--ForceBlocks] >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field--ForceBlocks] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] >> test.py::test[select-braces-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] >> test.py::test[window-win_func_lead_lag_opt--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--ForceBlocks] >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[order_by-order_by_tuple_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] >> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-ForceBlocks] >> test.py::test[file-parse_file_in_select_as_uint64--Results] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] >> test.py::test[schema-read_schema_other--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-read_schema-Results] >> test.py::test[flatten_by-flatten_columns-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> test.py::test[type_v3-replace_diff_layout--ForceBlocks] [GOOD] >> 
test.py::test[type_v3-replace_diff_layout--Results] >> test.py::test[pg-tpcds-q42-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-Results] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[ypath-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] >> test.py::test[schema-read_schema_change_other--ForceBlocks] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[blocks-if--ForceBlocks] [GOOD] >> test.py::test[blocks-if--Results] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[select-braces-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--ForceBlocks] >> test.py::test[join-filter_joined--ForceBlocks] [GOOD] >> test.py::test[join-filter_joined--Results] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--ForceBlocks] >> test.py::test[hor_join-table_record--ForceBlocks] [GOOD] >> test.py::test[hor_join-table_record--Results] >> test.py::test[type_v3-replace_diff_layout--Results] [GOOD] >> test.py::test[udf-python_script_from_file--ForceBlocks] [SKIPPED] >> test.py::test[udf-python_script_from_file--Results] [SKIPPED] >> test.py::test[udf-python_struct--ForceBlocks] >> test.py::test[pg-tpcds-q42-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_parquet_converters_to_utf8[v2] [GOOD] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v1] >> test.py::test[udf-python_struct--ForceBlocks] [SKIPPED] >> test.py::test[udf-python_struct--Results] [SKIPPED] >> test.py::test[union-union_multiin--ForceBlocks] |96.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part17/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] [GOOD] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[action-discard-default.txt-ForceBlocks] >> test.py::test[blocks-group_by_complex_key--ForceBlocks] [GOOD] >> test.py::test[blocks-group_by_complex_key--Results] >> test.py::test[schema-read_schema_change_other--ForceBlocks] [GOOD] >> test.py::test[schema-read_schema_change_other--Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-ForceBlocks] >> test.py::test[pg-tpcds-q55-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-Results] >> test.py::test[select-cast_double_to_uint32-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-7767_key_filter_with_view--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf--Results] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] >> test.py::test[hor_join-out_table_record-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-Results] >> test.py::test[action-insert_after_eval_xlock--ForceBlocks] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Results] >> test.py::test[blocks-if--Results] [GOOD] >> test.py::test[blocks-interval_add_date--ForceBlocks] |96.7%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[udf-two_regexps--Results] [GOOD] >> test.py::test[view-system_udf--Results] >> test.py::test[hor_join-table_record--Results] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-ForceBlocks] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-read_schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] >> test.py::test[join-mergejoin_force_align3--ForceBlocks] >> test.py::test[join-join_without_correlation_names-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_names-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_not_selected--ForceBlocks] >> test.py::test[pg-tpcds-q55-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_force_align3--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align3--Results] [SKIPPED] >> test.py::test[join-mergejoin_semi_composite_to_inner--ForceBlocks] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[join-filter_joined--Results] [GOOD] >> test.py::test[join-inner_with_select-off-ForceBlocks] >> test.py::test[sampling-map-keyfilter-ForceBlocks] [GOOD] >> test.py::test[sampling-map-keyfilter-Results] >> test.py::test[flatten_by-flatten_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] >> test.py::test[schema-diffrerent_schemas--Results] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] >> test.py::test[join-join_comp_common_table--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--ForceBlocks] >> test.py::test[binding-table_regexp_binding--ForceBlocks] [GOOD] >> test.py::test[binding-table_regexp_binding--Results] >> test.py::test[weak_field-weak_field--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field--Results] >> test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--ForceBlocks] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic--ForceBlocks] >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] >> test.py::test[action-insert_after_eval_xlock--Results] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] >> test.py::test[pg-tpcds-q73-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] [SKIPPED] >> 
test.py::test[join-nopushdown_filter_over_inner--Results] >> test.py::test[blocks-combine_hashed_max--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[schema-diffrerent_schemas--Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node >> test.py::test[sampling-map-keyfilter-Results] [GOOD] >> test.py::test[join-bush_dis_in_in_in--Results] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-ForceBlocks] >> test.py::test[sampling-subquery_expr-default.txt-ForceBlocks] >> test.py::test[union-union_multiin--ForceBlocks] [GOOD] >> test.py::test[union-union_multiin--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt-ForceBlocks] >> test.py::test[pg-aggregate_minus_zero--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> test.py::test[binding-table_regexp_binding--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--ForceBlocks] >> test.py::test[schema-select_all-yamred_dsv-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] >> test.py::test[table_range-range_over_filter_udf--Results] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] >> test.py::test[produce-discard_process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-discard_reduce_lambda--ForceBlocks] [SKIPPED] >> test.py::test[produce-discard_reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-process_and_filter-default.txt-ForceBlocks] >> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD] >> test.py::test[schema-def_values_job--Results] >> test.py::test[column_order-select_subquery-default.txt-Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas--Results] [SKIPPED] >> test.py::test[weak_field-weak_field--Results] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--ForceBlocks] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter1--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-split_input_with_key_filter1--Results] [SKIPPED] >> test.py::test[limit-yql-8611_calc_peephole--ForceBlocks] >> test.py::test[dq-precompute_result-default.txt-Results] [SKIPPED] >> test.py::test[dq-read_cost-default.txt-Results] [SKIPPED] >> test.py::test[expr-empty_iterator2--Results] >> test.py::test[pg-tpcds-q73-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-ForceBlocks] >> test.py::test[select-cast_double_to_uint32-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> test.py::test[blocks-distinct_mixed_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] >> test.py::test[schema-select_all-yamred_dsv-Results] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-ForceBlocks] >> test.py::test[expr-non_persistable_insert_into_fail--ForceBlocks] >> 
test.py::test[pg-tpch-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--ForceBlocks] >> test.py::test[window-win_func_over_group_by_compl--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Results] >> test.py::test[in-in_enum_single0-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] >> test.py::test[union-union_multiin--Results] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-ForceBlocks] >> test.py::test[action-discard-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-discard-default.txt-Results] >> test.py::test[schema-select_all_inferschema_range--Results] [GOOD] >> test.py::test[schema-select_fields_inferschema--Results] >> test.py::test[pg-tpcds-q60-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-Results] >> test.py::test[join-inner_with_select-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_with_select-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner-off-ForceBlocks] >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-ForceBlocks] >> test.py::test[select-cast_double_to_uint32-default.txt-Results] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-ForceBlocks] >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> test.py::test[hor_join-skip_sampling--Results] >> test.py::test[join-mergejoin_force_align3-off-ForceBlocks] |96.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-lookupjoin_not_selected--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_not_selected--Results] >> test.py::test[weak_field-weak_field_wrong_types_fail--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--ForceBlocks] >> test.py::test[blocks-interval_add_date--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_date--Results] >> test.py::test[expr-non_persistable_insert_into_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Results] >> test.py::test[order_by-assume_cut_prefix--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> test.py::test[file-where_key_in_file_content--ForceBlocks] >> test.py::test[join-mergejoin_semi_composite_to_inner--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] >> test.py::test[in-in_enum_single0-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-ForceBlocks] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] >> test.py::test[blocks-lazy_nonstrict_basic--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic--Results] >> test.py::test[pg-tpcds-q60-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-ForceBlocks] >> test.py::test[action-discard-default.txt-Results] [GOOD] >> test.py::test[action-eval_input_output_table--ForceBlocks] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> test.py::test[ql_filter-integer_members--Results] >> test.py::test[sampling-subquery_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] >> test.py::test[action-runtime_if_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-Results] >> test.py::test[produce-process_and_filter-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Results] >> test.py::test[order_by-assume_cut_prefix--Results] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-ForceBlocks] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only >> test.py::test[flatten_by-struct_without_correlation-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected--Results] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> test.py::test[join-mapjoin_on_very_complex_type--ForceBlocks] >> test.py::test[pg-tpcds-q77-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] >> test.py::test[pg-tpcds-q77-default.txt-Results] >> test.py::test[action-evaluate_pure--ForceBlocks] >> 
test.py::test[join-bush_dis_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-inner_with_order--ForceBlocks] >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q14-default.txt-ForceBlocks] >> test.py::test[schema-select_with_map-partial_read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-Results] >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--Results] [GOOD] >> test.py::test[expr-empty_iterator2--Results] [GOOD] >> test.py::test[join-pullup_extra_columns--Results] >> test.py::test[expr-non_persistable_inner_select_fail--Results] >> test.py::test[schema-select_fields_inferschema--Results] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Results] >> test.py::test[blocks-interval_add_date--Results] [GOOD] >> test.py::test[blocks-sort_one_asc--ForceBlocks] >> test.py::test[window-win_func_over_group_by_compl--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[produce-process_and_filter-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-ForceBlocks] >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_pg--ForceBlocks] >> test.py::test[sampling-subquery_expr-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-ForceBlocks] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] >> test.py::test[limit-insert_with_limit--ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] >> test.py::test[weak_field-weak_field_real_col-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] >> test.py::test[action-runtime_if_select-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q77-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q08-default.txt-ForceBlocks] >> test.py::test[tpch-q5-default.txt-Results] [GOOD] |96.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-Results] [GOOD] >> test.py::test[schema-select_yamr_fields--ForceBlocks] >> test.py::test[join-lookupjoin_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off-ForceBlocks] >> test.py::test[pg-select_columnref2-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[pg-select_columnref2-default.txt-Results] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-ForceBlocks] >> test.py::test[select-qualified_all_and_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] [GOOD] >> test.py::test[hor_join-group_sampling--ForceBlocks] >> test.py::test[window-win_func_lead_lag_opt--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] >> test.py::test[file-where_key_in_file_content--ForceBlocks] [GOOD] >> test.py::test[file-where_key_in_file_content--Results] >> test.py::test[expr-non_persistable_inner_select_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_uint64--Results] >> test.py::test[join-mergejoin_force_align3-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align3-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[lineage-error_type--ForceBlocks] [SKIPPED] >> test.py::test[lineage-error_type--Results] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-sort_force--ForceBlocks] >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[multicluster-sort_force--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-sort_force--Results] [SKIPPED] >> test.py::test[optimizers-reduce_with_aux_sort_column--ForceBlocks] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[join-anyjoin_common_nodup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodup--Results] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_strict--ForceBlocks] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] >> test.py::test[blocks-date_less_or_equal--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[order_by-order_by_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-Results] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-ForceBlocks] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[hor_join-skip_sampling--Results] [GOOD] >> test.py::test[file-where_key_in_file_content--Results] [GOOD] >> test.py::test[hor_join-out_mem_limit-default.txt-ForceBlocks] >> test.py::test[action-eval_input_output_table--ForceBlocks] [GOOD] >> test.py::test[action-eval_input_output_table--Results] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_table1-default.txt-ForceBlocks] >> 
test.py::test[join-inner_with_order--ForceBlocks] [GOOD] >> test.py::test[join-inner_with_order--Results] >> test.py::test[pg-tpcds-q64-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] >> test_ydb_backup.py::TestPermissionsBackupRestoreSchemeOnly::test_scheme_only [GOOD] >> test.py::test[limit-yql-8611_calc_peephole--ForceBlocks] [GOOD] >> test.py::test[limit-yql-8611_calc_peephole--Results] >> test.py::test[schema-user_schema_directread-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_with_sort--Results] >> test.py::test[join-pullup_extra_columns--Results] [GOOD] >> test.py::test[join-star_join_inners_premap--Results] >> test.py::test[pg-tpch-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q14-default.txt-Results] >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Results] >> test.py::test[action-evaluate_pure--ForceBlocks] [GOOD] >> test.py::test[action-evaluate_pure--Results] >> test.py::test[blocks-sort_one_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_one_asc--Results] >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[join-join_without_column-off-ForceBlocks] >> test.py::test[order_by-order_by_tuple-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--ForceBlocks] >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] >> test.py::test[weak_field-weak_field_rest--Results] >> test.py::test[schema-select_yamr_fields--ForceBlocks] [GOOD] >> test.py::test[schema-select_yamr_fields--Results] >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] >> test.py::test[action-subquery_merge_evaluate-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v1] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] >> test.py::test[action-eval_input_output_table--Results] [GOOD] >> test.py::test[aggr_factory-boolor-default.txt-ForceBlocks] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v2] >> test.py::test[join-lookupjoin_semi_1o2o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o-off-Results] >> test.py::test[join-lookupjoin_semi_1o2o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_left_null_column--ForceBlocks] >> test.py::test[join-mapjoin_on_very_complex_type--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type--Results] >> test.py::test[pg-tpcds-q64-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-ForceBlocks] >> test.py::test[file-parse_file_in_select_as_uint64--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--Results] >> test.py::test[schema-select_yamr_fields--Results] [GOOD] >> test.py::test[select-if-default.txt-ForceBlocks] >> test.py::test[blocks-sort_one_asc--Results] [GOOD] >> test.py::test[column_group-groups-max-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-max-Results] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] [GOOD] >> test.py::test[action-table_content_before_from_folder--ForceBlocks] >> 
test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[result_types-data-default.txt-Results] >> test.py::test[join-inner_with_order--Results] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--ForceBlocks] >> test.py::test[window-win_func_part_by_expr--ForceBlocks] [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] >> test.py::test[hor_join-group_sampling--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_sampling--Results] >> test.py::test[action-evaluate_pure--Results] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-parallel_for-default.txt-ForceBlocks] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_strict--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_strict--Results] >> test.py::test[blocks-combine_hashed_pg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_pg--Results] >> test.py::test[hor_join-sorted_out--ForceBlocks] >> test.py::test[window-generic/aggregations_include_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] >> test.py::test[join-mergejoin_force_per_link-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_left--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_left--Results] [SKIPPED] >> test.py::test[join-premap_common_cross-off-ForceBlocks] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[column_group-groups-max-Results] [SKIPPED] >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] [GOOD] >> test.py::test[schema-insert_sorted-read_schema-ForceBlocks] >> test.py::test[insert-append_sorted-to_sorted-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test.py::test[pg-select_table1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_table1-default.txt-Results] >> test.py::test[blocks-sub_uint64_opt2--ForceBlocks] >> test.py::test[select-scalar_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Results] >> test.py::test[pg-tpcds-q73-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] >> test.py::test[key_filter-convert--Results] >> test.py::test[optimizers-reduce_with_aux_sort_column--ForceBlocks] [GOOD] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-decimal_avg--Results] >> test.py::test[weak_field-weak_field_strict--Results] [GOOD] >> test.py::test[window-current/session--ForceBlocks] >> test.py::test[key_filter-utf8_with_legacy--ForceBlocks] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] [GOOD] >> 
test.py::test[produce-process_with_udf-default.txt-ForceBlocks] >> test.py::test[window-win_func_part_by_expr--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Results] >> test.py::test[pg-tpch-q08-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q08-default.txt-Results] >> test.py::test[limit-yql-8611_calc_peephole--Results] [GOOD] >> test.py::test[lineage-select_union_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_union_all-default.txt-Results] [SKIPPED] >> test.py::test[hor_join-group_sampling--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-ForceBlocks] >> test.py::test[hor_join-group_yamr--ForceBlocks] >> test.py::test[lineage-unused_columns-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-unused_columns-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-map_force--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-map_force--Results] [SKIPPED] >> test.py::test[multicluster-pull-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-pull-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--ForceBlocks] [SKIPPED] >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-minmax_strings--ForceBlocks] >> test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> test.py::test[insert-override-from_sorted-ForceBlocks] >> test.py::test[schema-user_schema_with_sort--Results] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] >> test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q99-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] >> test.py::test[join-mapjoin_on_very_complex_type--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-ForceBlocks] >> test.py::test[join-anyjoin_common_nodup--Results] [GOOD] >> test.py::test[join-cbo_4tables--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_4tables--Results] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval--ForceBlocks] >> test.py::test[hor_join-out_mem_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] >> test.py::test[weak_field-weak_field_rest--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[join-join_without_column-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_column-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] >> test.py::test[join-join_left_cbo--ForceBlocks] >> test.py::test[like-regexp_clause--Results] >> test.py::test[select-if-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-if-default.txt-Results] >> test.py::test[flatten_by-flatten_with_resource--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_part--Results] [SKIPPED] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--ForceBlocks] [SKIPPED] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> 
test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Results] [SKIPPED] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_left_null_column--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_left_null_column--Results] >> test.py::test[join-join_with_duplicate_keys_on_sorted--ForceBlocks] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] >> test.py::test[blocks-combine_hashed_pg--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--ForceBlocks] >> test.py::test[aggr_factory-boolor-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-boolor-default.txt-Results] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q99-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Results] >> test.py::test[window-generic/aggregations_include_current--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-ForceBlocks] >> test.py::test[select-if-default.txt-Results] [GOOD] >> test.py::test[select-logical_ops-default.txt-ForceBlocks] >> test.py::test[key_filter-convert--Results] [GOOD] >> test.py::test[key_filter-empty_range--ForceBlocks] >> test.py::test[blocks-sub_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-sub_uint64_opt2--Results] >> test.py::test[result_types-data-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--Results] >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [SKIPPED] >> test.py::test[hor_join-sorted_out--ForceBlocks] [GOOD] >> test.py::test[hor_join-sorted_out--Results] >> test.py::test[action-parallel_for-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-parallel_for-default.txt-Results] >> test.py::test[join-premap_common_cross--ForceBlocks] >> test.py::test[join-premap_common_cross-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner--ForceBlocks] >> test.py::test[action-table_content_before_from_folder--ForceBlocks] [GOOD] >> test.py::test[action-table_content_before_from_folder--Results] >> test.py::test[schema-insert_sorted-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-read_schema-Results] >> test.py::test[pg-tpch-q08-default.txt-Results] [GOOD] >> test.py::test[pg-wide_sort--ForceBlocks] >> test.py::test[join-mapjoin_left_null_column--Results] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-ForceBlocks] >> test.py::test[select-backtick_with_escapes-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf-default.txt-Results] >> test.py::test[join-star_join_inners_premap--Results] [GOOD] >> test.py::test[join-star_join_mirror--Results] >> test.py::test[hor_join-group_yamr--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_yamr--Results] >> 
test.py::test[select-complex_filter_with_order-default.txt-Results] >> test.py::test[blocks-sub_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--ForceBlocks] >> test.py::test[select-select_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[insert-override-from_sorted-ForceBlocks] [GOOD] >> test.py::test[insert-override-from_sorted-Results] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--ForceBlocks] >> test.py::test[window-full/session--Results] >> test.py::test[aggr_factory-boolor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-max-default.txt-ForceBlocks] >> test.py::test[like-regexp_clause--Results] [GOOD] >> test.py::test[limit-dynamic_sort_limit--Results] [SKIPPED] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] [GOOD] >> test.py::test[hor_join-out_range-default.txt-ForceBlocks] >> test.py::test[schema-insert_sorted-read_schema-Results] [GOOD] >> test.py::test[schema-other--ForceBlocks] [SKIPPED] >> test.py::test[schema-other--Results] [SKIPPED] >> test.py::test[schema-other_job--ForceBlocks] [SKIPPED] >> test.py::test[schema-other_job--Results] [SKIPPED] >> test.py::test[schema-patchtype--ForceBlocks] >> test.py::test[column_group-hint_diff_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail--Results] [SKIPPED] >> test.py::test[column_group-publish-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-publish-single-Results] [SKIPPED] >> test.py::test[column_group-respull--ForceBlocks] >> test.py::test[action-parallel_for-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-ForceBlocks] >> test.py::test[hor_join-sorted_out--Results] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq--ForceBlocks] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq--Results] [SKIPPED] >> test.py::test[insert-append-with_view-ForceBlocks] [SKIPPED] >> test.py::test[insert-append-with_view-Results] >> test.py::test[column_group-respull--ForceBlocks] [SKIPPED] >> test.py::test[column_group-respull--Results] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder--ForceBlocks] >> test.py::test[action-table_content_before_from_folder--Results] [GOOD] >> test.py::test[aggregate-compact_distinct--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-ForceBlocks] >> test.py::test[insert-append-with_view-Results] [SKIPPED] >> test.py::test[insert-select_after_replace_unwrap-default.txt-ForceBlocks] >> test.py::test[hor_join-group_yamr--Results] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt-ForceBlocks] >> test.py::test[window-win_func_rank_by_opt_all--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> test.py::test[produce-process_with_udf-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_list_table--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda_list_table--Results] [SKIPPED] >> test.py::test[sampling-bind_join_right-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_choose_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary-off-Results] [SKIPPED] >> 
test.py::test[join-mergejoin_force_no_sorted--ForceBlocks] >> test.py::test[join-join_left_cbo--ForceBlocks] [GOOD] >> test.py::test[join-join_left_cbo--Results] >> test.py::test[key_filter-utf8_with_legacy--ForceBlocks] [GOOD] >> test.py::test[key_filter-utf8_with_legacy--Results] >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> test.py::test[blocks-decimal_unary--Results] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-ForceBlocks] >> test.py::test[window-win_func_with_struct_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Results] >> test.py::test[join-equi_join_three_asterisk_eval--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval--Results] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] >> test.py::test[blocks-minmax_strings--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_strings--Results] >> test.py::test[insert-override-from_sorted-Results] [GOOD] >> test.py::test[insert-override-proto-ForceBlocks] >> test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] >> test.py::test[window-current/session--ForceBlocks] [GOOD] >> test.py::test[window-current/session--Results] >> test.py::test[select-logical_ops-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-logical_ops-default.txt-Results] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Results] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] >> test.py::test[join-premap_common_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner--Results] >> test_format_setting.py::TestS3::test_parquet_converters_to_date[v2] [GOOD] >> test.py::test[join-left_join_null_column-off-ForceBlocks] >> test.py::test[pg-tpch-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] >> test.py::test[join-premap_common_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_cross--Results] >> test.py::test[pg-wide_sort--ForceBlocks] [GOOD] >> test.py::test[join-join_left_cbo--Results] [GOOD] >> test.py::test[join-join_right_cbo--ForceBlocks] >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] >> test.py::test[select-complex_filter_with_order-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] >> test.py::test[select-logical_ops-default.txt-Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-ForceBlocks] >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[lineage-window_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--ForceBlocks] [SKIPPED] >> 
test.py::test[optimizers-test_lmap_opts--Results] [SKIPPED] >> test.py::test[order_by-literal_with_assume--ForceBlocks] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-ForceBlocks] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_very_complex_type-off-ForceBlocks] >> test.py::test[produce-reduce_with_python_row_repack--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> test.py::test[ql_filter-integer_many_right--ForceBlocks] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] >> test.py::test[blocks-type_and_callable_stats--ForceBlocks] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] >> test.py::test[blocks-distinct_opt_state_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Results] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] [GOOD] >> test.py::test[insert-insert_null-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[pg-wide_sort--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-15210_sqlin--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-15210_sqlin--Results] [SKIPPED] >> test.py::test[optimizers-yql-3455_filter_sorted--ForceBlocks] >> test.py::test[aggregate-agg_phases_table1-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] >> test.py::test[hor_join-out_table_record-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt-Results] >> test.py::test[column_order-insert_reorder_without_columnorder--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] >> test.py::test[window-current/session--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--ForceBlocks] >> test.py::test[join-equi_join_three_asterisk_eval--Results] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--ForceBlocks] >> test.py::test[join-mergejoin_force_no_sorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted--Results] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 09:59:39] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 09:59:40] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 09:59:42] "GET /mylib.sql HTTP/1.1" 200 - >> test.py::test[join-premap_common_inner--Results] [GOOD] >> test.py::test[join-premap_common_inner_filter--ForceBlocks] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_by--Results] >> 
test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt-Results] >> test.py::test[select-table_funcs_spec-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] >> test.py::test[insert-select_after_replace_unwrap-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] >> test.py::test[aggr_factory-max-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-max-default.txt-Results] >> test.py::test[sampling-bind_join_right-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-ForceBlocks] >> test.py::test[hor_join-out_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_range-default.txt-Results] >> test.py::test[insert-override-proto-ForceBlocks] [GOOD] >> test.py::test[insert-override-proto-Results] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] >> test.py::test[blocks-type_and_callable_stats--Results] [GOOD] >> test.py::test[column_group-hint_append2--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append2--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--Results] [SKIPPED] >> test.py::test[column_group-length-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-length-single-Results] [SKIPPED] >> test.py::test[column_order-select_plain-default.txt-ForceBlocks] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-ForceBlocks] >> test.py::test[blocks-minmax_strings--Results] [GOOD] >> test.py::test[blocks-nested_optionals--ForceBlocks] >> test.py::test[hor_join-out_table_record-default.txt-Results] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-ForceBlocks] >> test.py::test[schema-patchtype--ForceBlocks] [GOOD] >> test.py::test[schema-patchtype--Results] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--ForceBlocks] >> test.py::test[hor_join-out_mem_limit-default.txt-Results] [GOOD] >> test.py::test[in-in_compact_distinct--Results] >> test.py::test[insert-override-proto-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--ForceBlocks] >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-ForceBlocks] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-ForceBlocks] >> test.py::test[select-table_funcs_spec-default.txt-Results] [GOOD] >> test.py::test[select-type_assert-default.txt-ForceBlocks] >> test.py::test[insert-select_after_replace_unwrap-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_before_commit_fail--ForceBlocks] >> test.py::test[join-left_join_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-left_join_null_column-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_optional--ForceBlocks] >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] [GOOD] >> 
test.py::test[select-if-default.txt-Results] >> test.py::test[join-join_right_cbo--ForceBlocks] [GOOD] >> test.py::test[join-join_right_cbo--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--ForceBlocks] >> test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-ForceBlocks] >> test.py::test[key_filter-empty_range--ForceBlocks] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] >> test.py::test[sampling-bind_join_right-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-ForceBlocks] >> test.py::test[insert-insert_null-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta--Results] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir >> test.py::test[pg-select_from_columns_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-Results] >> test.py::test[order_by-literal_with_assume--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] >> test.py::test[ql_filter-integer_many_right--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_right--Results] >> test.py::test[aggr_factory-max-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-ForceBlocks] >> test.py::test[join-star_join_mirror--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--Results] >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-map-dynamic-Results] >> test.py::test[schema-patchtype--Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv_raw-ForceBlocks] >> test.py::test[insert-use_anon_table_without_fill_fail--ForceBlocks] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[insert-yql-14538--ForceBlocks] |96.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[join-mergejoin_force_no_sorted--Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_expr_key--ForceBlocks] >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Results] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> test.py::test[hor_join-out_range-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted--ForceBlocks] >> test.py::test[blocks-distinct_opt_state_all--Results] [GOOD] >> test.py::test[blocks-sort_one_desc--ForceBlocks] >> test.py::test[insert-use_anon_table_before_commit_fail--ForceBlocks] [GOOD] >> test.py::test[insert-use_anon_table_before_commit_fail--Results] 
[GOOD] >> test.py::test[insert-yql-13083-existig-ForceBlocks] >> test.py::test[pg-select_from_columns_star-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] [GOOD] >> test.py::test[order_by-sort_decimals--ForceBlocks] >> test.py::test[join-equi_join_two_mult_keys--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--Results] >> test.py::test[pg-select_subquery-default.txt-ForceBlocks] >> test.py::test[lineage-flatten_by--Results] [GOOD] >> test.py::test[lineage-member_over_if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-map_force--Results] >> test.py::test[ql_filter-integer_many_right--Results] [GOOD] >> test.py::test[ql_filter-integer_members--ForceBlocks] >> test.py::test[multicluster-map_force--Results] [SKIPPED] >> test.py::test[multicluster-pull-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[join-premap_common_inner_filter-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross--ForceBlocks] >> test.py::test[join-premap_common_inner_filter--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_filter--Results] >> test.py::test[join-mapjoin_on_very_complex_type-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce--ForceBlocks] >> test.py::test[join-join_right_cbo--Results] [GOOD] >> test.py::test[join-left_join_null_column--ForceBlocks] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-3455_filter_sorted--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] >> test.py::test[blocks-nested_optionals--ForceBlocks] [GOOD] >> test.py::test[blocks-nested_optionals--Results] >> test.py::test[pg-tpcds-q37-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] >> test.py::test[hor_join-yql-6477_table_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] >> test.py::test[select-if-default.txt-Results] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] >> test.py::test[order_by-native_desc_sort_with_limit--ForceBlocks] >> test.py::test[order_by-native_desc_sort_with_limit--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-order_by_list_of_strings--ForceBlocks] >> test.py::test[select-type_assert-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-type_assert-default.txt-Results] >> test.py::test[insert-keepmeta--Results] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--Results] >> test.py::test[join-equi_join_two_mult_keys--Results] [GOOD] >> test.py::test[join-equi_join_two_mult_keys-off-ForceBlocks] >> test.py::test[key_filter-empty_range--Results] [GOOD] >> test.py::test[limit-dynamic_limit--ForceBlocks] [SKIPPED] >> 
test.py::test[limit-dynamic_limit--Results] [SKIPPED] >> test.py::test[column_order-select_plain-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_optional--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_optional--Results] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--ForceBlocks] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] >> test_ydb_backup.py::TestPermissionsBackupRestoreEmptyDir::test_empty_dir [GOOD] >> test.py::test[lineage-group_by_asstruct_key-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Results] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-process-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-process-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Results] [SKIPPED] >> test.py::test[lineage-scalar_context--ForceBlocks] [SKIPPED] >> test.py::test[lineage-scalar_context--Results] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-ForceBlocks] >> test.py::test[schema-select_all-yamred_dsv_raw-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] >> test.py::test[blocks-nested_optionals--Results] [GOOD] >> test.py::test[blocks-tuple_type--ForceBlocks] >> test.py::test[lineage-select_all_filter-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-Results] >> test.py::test[join-premap_common_inner_filter--Results] [GOOD] >> test.py::test[join-pullup_left_semi--ForceBlocks] >> test.py::test[bigdate-table_yt_key_filter-on-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-on-Results] [SKIPPED] >> test.py::test[binding-table_concat_strict_binding-default.txt-ForceBlocks] >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[schema-fake_column-default.txt-Results] >> test.py::test[join-mapjoin_opt_vs_2xopt--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--ForceBlocks] >> test.py::test[hor_join-yql-6477_table_path-default.txt-Results] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--ForceBlocks] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-ForceBlocks] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] >> test.py::test[pg-select_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf--Results] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--ForceBlocks] >> test.py::test[in-in_compact_distinct--Results] [GOOD] >> 
test.py::test[select-type_assert-default.txt-Results] [GOOD] >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-ForceBlocks] >> test.py::test[window-full/session_aliases--Results] >> test.py::test[in-in_sorted--ForceBlocks] [GOOD] >> test.py::test[in-in_sorted--Results] >> test.py::test[schema-select_all-yamred_dsv_raw-Results] [GOOD] >> test.py::test[ql_filter-integer_members--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_members--Results] >> test.py::test[insert-keepmeta_proto_fail--Results] [GOOD] >> test.py::test[blocks-sort_one_desc--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_with_read_udf_fail--Results] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option >> test.py::test[blocks-sort_one_desc--Results] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] |96.9%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part19/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[aggregate-group_by_hop_expr_key--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_expr_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_duo--ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-ForceBlocks] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-keep_sort_with_renames--Results] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[in-in_compact_distinct--Results] [GOOD] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[schema-select_all-yamred_dsv_raw-Results] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] >> test.py::test[produce-process_streaming_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-Results] >> test.py::test[insert-yql-14538--ForceBlocks] [GOOD] >> test.py::test[insert-yql-14538--Results] >> test.py::test[pg-tpch-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Results] >> test.py::test[column_order-select_plain-default.txt-Results] [GOOD] >> test.py::test[count-count_all-default.txt-ForceBlocks] >> test.py::test[join-left_join_right_pushdown_optional--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-ForceBlocks] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] >> test.py::test[pg-select_subquery-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-ForceBlocks] >> test.py::test[join-pullup_cross--ForceBlocks] [GOOD] >> test.py::test[join-pullup_cross--Results] >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Results] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--ForceBlocks] >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] >> 
test.py::test[aggr_factory-variance-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] >> test.py::test[join-star_join_semionly_premap--Results] [GOOD] >> test.py::test[join-yql-16011--Results] [SKIPPED] >> test.py::test[json-json_exists/example--Results] >> test.py::test[blocks-sort_one_desc--Results] [GOOD] >> test.py::test[blocks-top_sort_one_desc--ForceBlocks] >> test.py::test[join-left_join_null_column--ForceBlocks] [GOOD] >> test.py::test[join-left_join_null_column--Results] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> test.py::test[schema-insert-row_spec-ForceBlocks] >> test.py::test[order_by-sort_decimals--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_decimals--Results] >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[aggregate-no_compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-percentile_and_variance--Results] >> test.py::test[order_by-order_by_list_of_strings--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Results] >> test.py::test[insert-keepmeta_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-override-from_sorted-Results] >> test.py::test[action-eval_atom_wrong_type_expr--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--ForceBlocks] >> test.py::test[window-full/noncompact_with_nulls_tuple_key--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] >> test.py::test[insert-yql-13083-existig-ForceBlocks] [GOOD] >> test.py::test[insert-yql-13083-existig-Results] >> test.py::test[in-in_sorted--Results] [GOOD] >> test.py::test[insert-append_after_replace-default.txt-ForceBlocks] >> test.py::test[blocks-tuple_type--ForceBlocks] [GOOD] >> test.py::test[blocks-tuple_type--Results] >> test.py::test[optimizers-unused_columns_window_no_payloads--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] >> test.py::test[produce-process_streaming_count-default.txt-Results] [GOOD] >> test.py::test[produce-process_trivial_as_struct-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q51-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--ForceBlocks] >> test.py::test[join-equi_join_two_mult_keys-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_two_mult_keys-off-Results] [SKIPPED] >> test.py::test[join-flatten_columns1--ForceBlocks] >> test.py::test[join-mergejoin_any_no_join_reduce--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary--ForceBlocks] >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] >> test.py::test[join-pullup_left_semi--ForceBlocks] [GOOD] >> test.py::test[join-pullup_left_semi--Results] >> test.py::test[join-pullup_cross--Results] [GOOD] >> test.py::test[join-pullup_exclusion--ForceBlocks] 
>> test.py::test[order_by-order_by_list_of_strings--Results] [GOOD] >> test.py::test[pg-insert--ForceBlocks] >> test.py::test[distinct-distinct_star1--ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--ForceBlocks] >> test.py::test[aggregate-aggregate_udf_nested--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--Results] >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[order_by-literal_empty_list_sort--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] >> test.py::test[order_by-sort_decimals--Results] [GOOD] >> test.py::test[order_by-sort_simple--ForceBlocks] >> test.py::test[blocks-tuple_type--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-single-Results] [SKIPPED] >> test.py::test[column_group-hint_append--ForceBlocks] >> test.py::test[action-eval_on_modif_table_fail--ForceBlocks] [GOOD] >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[aggr_factory-count-default.txt-ForceBlocks] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] |96.9%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part19/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[column_group-hint_append--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append--Results] [SKIPPED] >> test.py::test[column_order-select_where-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-ForceBlocks] >> test.py::test[join-left_join_null_column--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--ForceBlocks] >> test.py::test[sampling-subquery_filter-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_filter-default.txt-Results] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-ForceBlocks] >> test.py::test[insert-yql-13083-existig-Results] [GOOD] >> test.py::test[insert_monotonic-from_empty--ForceBlocks] >> test.py::test[insert_monotonic-break_sort_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[insert_monotonic-keep_unique--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-keep_unique--Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-block_input_mapreduce--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_mapreduce--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_hashed_min--ForceBlocks] >> test_ydb_backup.py::TestRestoreACLOption::test_restore_acl_option [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Results] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-ForceBlocks] >> test.py::test[insert-override-from_sorted-Results] [GOOD] >> test.py::test[insert-override-with_view-Results] [SKIPPED] >> test.py::test[insert_monotonic-several2-default.txt-Results] >> test.py::test[join-mergejoin_big_primary_unique--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Results] >> test.py::test[count-count_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_all-default.txt-Results] >> test.py::test[join-pullup_left_semi--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-ForceBlocks] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-read_cost-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_udf_nested--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-ForceBlocks] >> test.py::test[blocks-top_sort_one_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_one_desc--Results] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_filter-default.txt-Results] [GOOD] >> test.py::test[sampling-zero_percentage--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo--ForceBlocks] 
[GOOD] >> test.py::test[aggregate-group_by_rollup_duo--Results] >> test.py::test[count-count_all-default.txt-Results] [GOOD] >> test.py::test[count-count_by_nulls--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_subst-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o--ForceBlocks] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] >> test.py::test[produce-process_trivial_as_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-insert-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-insert-row_spec-Results] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] >> test.py::test[pg-tpcds-q72-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_choose_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary--Results] >> test.py::test[join-mergejoin_big_primary_unique--Results] [GOOD] >> test.py::test[join-mergejoin_with_table_range--ForceBlocks] >> test.py::test[insert-append_after_replace-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-append_after_replace-default.txt-Results] >> test.py::test[in-in_tablesource_to_equijoin--ForceBlocks] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin--Results] >> test.py::test[optimizers-keep_sort_with_renames--Results] [GOOD] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] >> test.py::test[like-regexp_clause--ForceBlocks] >> test.py::test[blocks-date_sub--ForceBlocks] >> test.py::test[join-flatten_columns1--ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns1--Results] >> test.py::test[order_by-literal_empty_list_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Results] >> test.py::test[blocks-top_sort_one_desc--Results] [GOOD] >> test.py::test[column_group-groups-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-single-Results] [SKIPPED] >> test.py::test[column_group-length-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-length-perusage-Results] >> test.py::test[column_group-length-perusage-Results] [SKIPPED] >> test.py::test[column_order-select_groupby_with_star-default.txt-ForceBlocks] >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[pg-insert--ForceBlocks] [GOOD] >> test.py::test[pg-insert--Results] >> test.py::test[join-pullup_exclusion--ForceBlocks] [GOOD] >> test.py::test[join-pullup_exclusion--Results] >> test.py::test[pg-tpcds-q59-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-Results] >> test.py::test[schema-insert-row_spec-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-ForceBlocks] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-ForceBlocks] >> test.py::test[json-json_exists/example--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--Results] >> test.py::test[column_order-select_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test.py::test[insert_monotonic-from_empty--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-from_empty--Results] >> test.py::test[order_by-literal_empty_list_sort--Results] [GOOD] >> test.py::test[order_by-native_desc_sort--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[insert-append_after_replace-default.txt-Results] [GOOD] >> test.py::test[insert-insert_null-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_min--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_min--Results] >> test.py::test[window-yql-14738-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-Results] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] >> test.py::test[join-mergejoin_choose_primary--Results] [GOOD] >> test.py::test[pg-insert--Results] [GOOD] >> test.py::test[pg-name--ForceBlocks] >> test.py::test[aggr_factory-count-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] >> test.py::test[dq-read_cost-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-read_cost-default.txt-Results] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append--ForceBlocks] |96.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[json-json_exists/example--Results] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-ForceBlocks] >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-Results] [GOOD] >> test.py::test[join-compact_join--Results] >> test.py::test[join-flatten_columns1--Results] [GOOD] >> test.py::test[join-flatten_columns1-off-ForceBlocks] >> test.py::test[udf-udaf_short--ForceBlocks] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-where_not_null--Results] >> test.py::test[join-pushdown_filter_over_left-off-ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-Results] [SKIPPED] >> test.py::test[join-three_equalities-off-ForceBlocks] >> test.py::test[insert_monotonic-from_empty--Results] [GOOD] >> test.py::test[join-convert_key--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_select_distinct--ForceBlocks] >> test.py::test[pg-tpcds-q54-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped--ForceBlocks] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[join-mergejoin_choose_primary--Results] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-Results] >> test.py::test[schema-select_all_forceinferschema--ForceBlocks] [SKIPPED] >> test.py::test[schema-select_all_forceinferschema--Results] [SKIPPED] >> 
test.py::test[schema-user_schema_with_sort--ForceBlocks] >> test.py::test[sampling-zero_percentage--ForceBlocks] [GOOD] >> test.py::test[sampling-zero_percentage--Results] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_simple--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_simple--Results] >> test.py::test[aggregate-avg_and_sum-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] >> test.py::test[stream_lookup_join-lookup_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_simple--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[table_range-merge_non_strict--ForceBlocks] >> test.py::test[count-count_by_nulls--ForceBlocks] [GOOD] >> test.py::test[count-count_by_nulls--Results] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] >> test.py::test[blocks-combine_hashed_min--Results] [GOOD] >> test.py::test[join-pullup_exclusion--Results] [GOOD] >> test.py::test[join-pullup_inner-off-ForceBlocks] >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q16-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Results] [SKIPPED] >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[blocks-combine_hashed_minmax_double--ForceBlocks] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda--ForceBlocks] >> test.py::test[join-mergejoin_with_table_range--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Results] >> test.py::test[join-lookupjoin_inner_1o2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] >> test.py::test[pg-tpcds-q72-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] 
recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:46.859634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:46.859665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:46.859671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:46.859676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:46.859682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:46.859687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:46.859695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:46.859711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:46.859815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:46.859902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:46.873603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:46.873632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:46.873728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:46.876179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:46.876251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:46.876281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:46.878100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:46.878159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:46.878321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.878384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:46.879086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.879386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:46.879400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.879457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:46.879466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:46.879473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:46.879500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:46.881449Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:46.906100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:46.906194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.906259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:46.906333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:46.906346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.907384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.907433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:46.907507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.907519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:46.907525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:46.907531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-05-05T09:57:46.908080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.908116Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:46.908122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:46.908550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.908562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.908569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.908577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:46.909329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:46.909914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:46.909965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:46.910181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:46.910214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:46.910221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.910287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:46.910298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:46.910339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:46.910353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:46.916275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:46.916297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-05-05T09:57:46.916356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:46.916363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-05-05T09:57:46.916377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:46.916387Z node 1 :FLAT_TX_SCHEMESHARD I ... Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-05-05T10:00:50.544087Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T10:00:50.544213Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544221Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-05-05T10:00:50.544232Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T10:00:50.544456Z node 404 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T10:00:50.544480Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544485Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T10:00:50.544490Z node 404 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544584Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T10:00:50.544606Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2025-05-05T10:00:50.544737Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544758Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 1735166789732 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544766Z node 404 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544792Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T10:00:50.544801Z node 404 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T10:00:50.544805Z node 404 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T10:00:50.544810Z node 404 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T10:00:50.544813Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T10:00:50.544823Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T10:00:50.544833Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T10:00:50.544839Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T10:00:50.544845Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T10:00:50.544849Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T10:00:50.544852Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T10:00:50.544862Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-05-05T10:00:50.544871Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T10:00:50.544875Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-05-05T10:00:50.544878Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-05-05T10:00:50.545382Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-05-05T10:00:50.545392Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-05-05T10:00:50.545447Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.545457Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-05-05T10:00:50.545460Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-05-05T10:00:50.545721Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.545735Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T10:00:50.546002Z node 404 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T10:00:50.546009Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T10:00:50.546040Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-05-05T10:00:50.546057Z node 404 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-05-05T10:00:50.546060Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [404:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T10:00:50.546064Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [404:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T10:00:50.546195Z node 404 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.546204Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.546207Z node 404 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T10:00:50.546211Z node 404 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T10:00:50.546214Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T10:00:50.546289Z node 404 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.546298Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.546300Z node 404 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T10:00:50.546303Z node 404 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-05-05T10:00:50.546305Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-05-05T10:00:50.546311Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T10:00:50.546315Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [404:127:2152] 2025-05-05T10:00:50.546331Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T10:00:50.546334Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-05-05T10:00:50.546339Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T10:00:50.546736Z node 404 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.546990Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:00:50.547006Z node 404 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T10:00:50.547014Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T10:00:50.547021Z node 404 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T10:00:50.547024Z node 404 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T10:00:50.547027Z node 404 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1003, itemIdx# 4294967295 2025-05-05T10:00:50.547071Z node 404 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T10:00:50.547540Z node 404 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-05-05T10:00:50.547613Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-05-05T10:00:50.547621Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-05-05T10:00:50.547695Z node 404 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-05-05T10:00:50.547714Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-05-05T10:00:50.547719Z node 404 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [404:882:2814] TestWaitNotification: OK eventTxId 1003 >> test.py::test[pg-tpcds-q54-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-ForceBlocks] >> test.py::test[like-regexp_clause--ForceBlocks] [GOOD] >> test.py::test[like-regexp_clause--Results] >> test.py::test[window-generic/aggregations_mixed--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_mixed--Results] >> test.py::test[sampling-zero_percentage--Results] [GOOD] >> test.py::test[schema-append_to_desc--ForceBlocks] >> test.py::test[aggregate-native_desc_group_compact_by--ForceBlocks] >> test.py::test[schema-select_all-row_spec_extra_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] [GOOD] >> test.py::test[optimizers-simplified_path_constraint--Results] [SKIPPED] >> test.py::test[optimizers-yql-17413-topsort--Results] >> test.py::test[blocks-distinct_opt_state_all--Results] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[order_by-sort_simple--Results] [GOOD] >> test.py::test[pg-in_mixed--ForceBlocks] >> test.py::test[count-count_by_nulls--Results] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q72-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] >> test.py::test[insert-insert_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-insert_null-default.txt-Results] >> 
test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] >> test.py::test[aggregate-avg_and_sum-default.txt-Results] [GOOD] >> test.py::test[aggregate-error_type--ForceBlocks] >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[like-regexp_clause--Results] [GOOD] >> test.py::test[lineage-isolated-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-insert_fill--ForceBlocks] [SKIPPED] >> test.py::test[multicluster-insert_fill--Results] [SKIPPED] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q91-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-Results] >> test.py::test[column_order-select_groupby_with_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] >> test.py::test[join-mergejoin_with_table_range--Results] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--ForceBlocks] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_inner_1o2o--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-ForceBlocks] >> test.py::test[select-where_not_null--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-win_expr_bounds--Results] >> test.py::test[insert-insert_null-default.txt-Results] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-ForceBlocks] >> test.py::test[pg-name--ForceBlocks] [GOOD] >> test.py::test[pg-name--Results] >> test.py::test[join-convert_key--ForceBlocks] [GOOD] >> test.py::test[join-convert_key--Results] >> test.py::test[join-flatten_columns1-off-ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns1-off-Results] [SKIPPED] >> test.py::test[join-full_join-off-ForceBlocks] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append--ForceBlocks] [GOOD] >> test.py::test[epochs-reset_sortness_on_append--Results] >> test.py::test[udf-udaf_short--ForceBlocks] [GOOD] >> test.py::test[udf-udaf_short--Results] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--ForceBlocks] >> test.py::test[pg-tpcds-q91-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-ForceBlocks] >> test.py::test[count-count_all_grouped--ForceBlocks] [GOOD] >> test.py::test[count-count_all_grouped--Results] >> test.py::test[aggregate-error_type--ForceBlocks] [GOOD] >> test.py::test[aggregate-error_type--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--ForceBlocks] >> test.py::test[aggregate-group_by_ru_with_select_distinct--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_mixed--Results] [GOOD] >> test.py::test[window-rank/plain--ForceBlocks] >> 
test.py::test[pg-tpcds-q58-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] >> test.py::test[aggregate-aggregate_with_lambda--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda--Results] >> test.py::test[table_range-merge_non_strict--ForceBlocks] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] >> test.py::test[schema-user_schema_with_sort--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_with_sort--Results] >> test.py::test[table_range-concat_sorted_max_tables--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] >> test.py::test[join-three_equalities-off-ForceBlocks] [GOOD] >> test.py::test[join-three_equalities-off-Results] >> test.py::test[blocks-combine_hashed_minmax_double--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_double--Results] >> test.py::test[in-in_tablesource_to_equijoin--Results] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-ForceBlocks] >> test.py::test[join-three_equalities-off-Results] [SKIPPED] >> test.py::test[join-yql_465-off-ForceBlocks] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-ForceBlocks] >> test.py::test[join-pullup_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left-off-ForceBlocks] >> test.py::test[pg-name--Results] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-ForceBlocks] >> test.py::test[join-compact_join--Results] [GOOD] >> test.py::test[join-equi_join_three_asterisk-off-Results] [SKIPPED] >> test.py::test[join-full_equal_null--Results] >> test.py::test[lineage-join_as_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-list_literal3-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-sort_force--Results] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate--Results] >> test.py::test[blocks-interval_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] >> test.py::test[column_order-select_groupby_with_star-default.txt-Results] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt-ForceBlocks] >> test.py::test[epochs-reset_sortness_on_append--Results] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--ForceBlocks] >> test.py::test[udf-udaf_short--Results] [GOOD] >> test.py::test[union_all-infer_3-default.txt-ForceBlocks] >> test.py::test[schema-append_to_desc--ForceBlocks] [GOOD] >> test.py::test[schema-append_to_desc--Results] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[aggregate-group_by_ru_with_select_distinct--ForceBlocks] [GOOD] >> test.py::test[join-convert_key--Results] [GOOD] >> test.py::test[join-convert_key-off-ForceBlocks] >> test.py::test[pg-tpch-q08-default.txt-Results] >> test.py::test[blocks-date_sub--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub--Results] >> test.py::test[join-lookupjoin_bug7646_subst--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--Results] >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-ForceBlocks] >> 
test.py::test[union_all-union_all_with_limits-default.txt-Results] >> test.py::test[count-count_all_grouped--Results] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] >> test.py::test[aggr_factory-log_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] >> test.py::test[table_range-concat_sorted_max_tables--Results] [GOOD] >> test.py::test[table_range-range_with_view--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] >> test.py::test[schema-user_schema_with_sort--Results] [GOOD] >> test.py::test[select-bit_ops-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_base_fail--Results] >> test.py::test[pg-in_mixed--ForceBlocks] [GOOD] >> test.py::test[pg-in_mixed--Results] >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-Results] >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q13-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_lambda--Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-ForceBlocks] >> test.py::test[join-nopushdown_filter_over_inner--ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--Results] >> test.py::test[blocks-combine_hashed_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_set--ForceBlocks] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--Results] >> test.py::test[schema-user_schema_directread-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Results] >> test.py::test[schema-append_to_desc--Results] [GOOD] >> test.py::test[schema-copy-schema-ForceBlocks] >> test.py::test[aggregate-native_desc_group_compact_by--ForceBlocks] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] >> test.py::test[ql_filter-integer_escaping--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] >> test.py::test[join-mapjoin_early_rewrite-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names-off-ForceBlocks] >> test.py::test[insert-insert_relabeled-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-insert_relabeled-default.txt-Results] >> test.py::test[pg-in_mixed--Results] [GOOD] >> test.py::test[pg-select_yql_type--ForceBlocks] >> test.py::test[pg-tpcds-q94-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_columns-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-17413-topsort--Results] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] >> test.py::test[join-lookupjoin_bug7646_subst--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_base_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] >> test.py::test[schema-user_schema_directread-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--ForceBlocks] >> 
test.py::test[window-win_expr_bounds--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi--ForceBlocks] >> test.py::test[aggregate-group_by_expr_and_having--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--Results] >> test.py::test[ql_filter-integer_escaping--Results] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] >> test.py::test[in-in_with_opt_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] >> test.py::test[join-full_join-off-ForceBlocks] [GOOD] >> test.py::test[join-full_join-off-Results] [SKIPPED] >> test.py::test[join-inner_with_select--ForceBlocks] >> test.py::test[union_all-infer_3-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-infer_3-default.txt-Results] >> test.py::test[sampling-zero_percentage--Results] >> test.py::test[pg-select_alias_partial-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> test.py::test[blocks-partial_blocks1--Results] >> test.py::test[pg-tpch-q04-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] >> test.py::test[join-yql_465-off-ForceBlocks] [GOOD] >> test.py::test[join-yql_465-off-Results] [SKIPPED] >> test.py::test[key_filter-is_null_with_condition--ForceBlocks] >> test.py::test[join-nopushdown_filter_over_inner--Results] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off-ForceBlocks] >> test.py::test[insert-insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-keepmeta--ForceBlocks] >> test.py::test[pg-tpcds-q87-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-Results] >> test.py::test[window-win_lead_in_mem-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-Results] >> test.py::test[column_order-select_limit_offset-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt-Results] >> test.py::test[select-bit_ops-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-bit_ops-default.txt-Results] >> test.py::test[type_v3-append_diff_layout1--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-file_outer--Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[join-convert_key-off-ForceBlocks] [GOOD] >> test.py::test[join-convert_key-off-Results] [SKIPPED] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-ForceBlocks] >> test.py::test[union_all-infer_3-default.txt-Results] [GOOD] >> test.py::test[view-file_eval--ForceBlocks] >> test.py::test[optimizers-aggregate_over_aggregate--Results] [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] [SKIPPED] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] >> test.py::test[optimizers-yql-15210_sqlin--Results] [SKIPPED] >> test.py::test[order_by-assume_over_input--Results] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] [GOOD] >> 
test.py::test[join-yql-14829_left-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[key_filter-calc_dependent-default.txt-ForceBlocks] >> test.py::test[insert-fail_read_view_after_modify--ForceBlocks] >> test.py::test[join-full_equal_null--Results] [GOOD] >> test.py::test[join-full_trivial-off-Results] [SKIPPED] >> test.py::test[join-grace_join1-off-Results] [SKIPPED] >> test.py::test[join-inner_all--Results] >> test.py::test[table_range-range_with_view--ForceBlocks] [GOOD] >> test.py::test[table_range-range_with_view--Results] >> test.py::test[aggregate-group_by_expr_and_having--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--ForceBlocks] >> test.py::test[pg-tpcds-q87-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-ForceBlocks] >> test.py::test[aggregate-native_desc_group_compact_by--Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-ForceBlocks] >> test.py::test[window-rank/plain--ForceBlocks] [GOOD] >> test.py::test[window-rank/plain--Results] >> test.py::test[blocks-combine_hashed_set--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] >> test.py::test[schema-copy-schema-ForceBlocks] [GOOD] >> test.py::test[schema-copy-schema-Results] >> test.py::test[column_order-select_limit_offset-default.txt-Results] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--ForceBlocks] [GOOD] >> test.py::test[expr-double_join_with_list_from_range--Results] >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[tpch-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] >> test.py::test[pg-select_alias_partial-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-ForceBlocks] >> test.py::test[blocks-date_sub--Results] [GOOD] >> test.py::test[blocks-filter_expr--ForceBlocks] >> test.py::test[select-bit_ops-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-ForceBlocks] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-ForceBlocks] >> test.py::test[view-file_eval--ForceBlocks] [GOOD] >> test.py::test[view-file_eval--Results] [GOOD] >> test.py::test[view-view_with_lambda_process--ForceBlocks] >> test.py::test[pg-select_yql_type--ForceBlocks] [GOOD] >> test.py::test[pg-select_yql_type--Results] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[insert-fail_read_view_after_modify--ForceBlocks] [GOOD] >> test.py::test[insert-fail_read_view_after_modify--Results] [GOOD] >> test.py::test[insert-override-from_sorted_calc-ForceBlocks] >> test.py::test[sampling-zero_percentage--Results] [GOOD] >> test.py::test[schema-concat--Results] >> test.py::test[schema-copy-schema-Results] [GOOD] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[column_order-select_limit_offset-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/py3test >> 
test_format_setting.py::TestS3::test_s3_push_down_parquet[v2] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c97/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c97/ydb/tests/fq/s3/test-results/py3test/testing_out_stuff/test_format_setting/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3525842) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 3529647 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[blocks-combine_hashed_set--Results] [GOOD] >> test.py::test[blocks-date_top_sort--ForceBlocks] >> test.py::test[select-hits_count--ForceBlocks] [GOOD] >> test.py::test[select-hits_count--Results] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--Results] >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite--ForceBlocks] >> test.py::test[pg-tpch-q08-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pragma-file-default.txt-ForceBlocks] >> test.py::test[join-inner_with_select--ForceBlocks] [GOOD] >> test.py::test[join-inner_with_select--Results] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[tpch-q12-default.txt-ForceBlocks] >> test.py::test[insert-keepmeta--ForceBlocks] [GOOD] |97.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[schema-copy-schema-Results] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta--Results] >> test.py::test[key_filter-is_null_with_condition--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Results] >> test.py::test[join-nopushdown_filter_over_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_common_left_cross--ForceBlocks] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[pg-tpch-q01-default.txt-Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> 
test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] >> test.py::test[view-secure--Results] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] [GOOD] >> test.py::test[union-union_multiin--Results] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[order_by-assume_over_input--Results] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--Results] >> test.py::test[distinct-distinct_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Results] >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range-off-ForceBlocks] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--Results] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_and_dict_access--ForceBlocks] >> test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> test.py::test[ansi_idents-basic_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] [SKIPPED] >> test.py::test[insert-keepmeta--Results] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-ForceBlocks] >> test.py::test[action-action_eval_cluster_and_table-default.txt-Results] [GOOD] >> test.py::test[action-discard-default.txt-Results] >> test.py::test[key_filter-calc_dependent-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent-default.txt-Results] >> test.py::test[expr-double_join_with_list_from_range--Results] [GOOD] >> test.py::test[expr-empty_iterator2--ForceBlocks] >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-ForceBlocks] >> test.py::test[select-dict_lookup_column_names-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] >> test.py::test[window-rank/plain--Results] [GOOD] >> test.py::test[window-win_inline_spec-default.txt-ForceBlocks] >> test.py::test[blocks-struct_type--Results] [GOOD] >> test.py::test[case-case_then_else-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] >> test.py::test[blocks-filter_expr--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_expr--Results] >> test.py::test[join-inner_with_select--Results] [GOOD] >> test.py::test[join-join_key_cmp_udf--ForceBlocks] >> test.py::test[view-secure--Results] [GOOD] >> 
test.py::test[view-trivial_view_concat--Results] >> test.py::test[join-inner_all--Results] [GOOD] >> test.py::test[join-join_and_distinct_key--Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] [SKIPPED] >> test_ydb_backup.py::TestRestoreNoData::test_restore_no_data [GOOD] >> test.py::test[schema-concat--Results] [GOOD] >> test.py::test[schema-copy-schema-Results] >> test.py::test[view-view_with_lambda_process--ForceBlocks] [GOOD] >> test.py::test[view-view_with_lambda_process--Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[ql_filter-integer_many_noskiff--Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_rw--ForceBlocks] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] >> test.py::test[insert-override-from_sorted_calc-ForceBlocks] [GOOD] >> test.py::test[insert-override-from_sorted_calc-Results] >> test.py::test[pg-tpch-q05-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] >> test.py::test[key_filter-calc_dependent-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-ForceBlocks] >> test.py::test[select-dict_lookup_column_names-default.txt-Results] [GOOD] >> test.py::test[select-optional_in_job--ForceBlocks] >> test.py::test[blocks-filter_expr--Results] [GOOD] >> test.py::test[blocks-top_sort_one_asc--ForceBlocks] >> test.py::test[pragma-file-default.txt-ForceBlocks] [GOOD] >> test.py::test[pragma-file-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] >> test.py::test[distinct-distinct_window-default.txt-Results] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan--ForceBlocks] [SKIPPED] >> test.py::test[expr-evaluate_parse_inf_nan--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q01-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] >> test.py::test[order_by-literal_take_zero_sort--Results] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] >> test.py::test[view-view_with_lambda_process--Results] [GOOD] >> test.py::test[window-current/ansi_current--ForceBlocks] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread--Results] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail--ForceBlocks] [SKIPPED] >> 
test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail--Results] [SKIPPED] >> test.py::test[produce-process_sorted_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream--Results] [SKIPPED] >> test.py::test[result_types-containers-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q02-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[insert-override-from_sorted_calc-Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-ForceBlocks] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--ForceBlocks] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_field_subset--ForceBlocks] >> test.py::test[tpch-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q12-default.txt-Results] >> test.py::test[blocks-date_top_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_table_range-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on--ForceBlocks] >> test.py::test[insert-literals_to_string-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-literals_to_string-default.txt-Results] >> test.py::test[join-premap_common_left_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_left_cross--Results] >> test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[pg-tpch-q07-default.txt-Results] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-ForceBlocks] >> test.py::test[schema-copy-schema-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_part-Results] >> test.py::test[join-mapjoin_early_rewrite--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--ForceBlocks] >> test.py::test[action-action_eval_cluster_table_for--Results] >> test.py::test[order_by-native_desc_publish--Results] >> test.py::test[union-union_multiin--Results] [GOOD] >> test.py::test[union-union_trivial-default.txt-Results] >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-ForceBlocks] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] >> test.py::test[expr-empty_iterator2--ForceBlocks] [GOOD] >> test.py::test[expr-empty_iterator2--Results] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr--Results] >> test.py::test[weak_field-weak_field_esc_string--Results] >> test.py::test[insert-literals_to_string-default.txt-Results] [GOOD] >> 
test.py::test[insert-override--ForceBlocks] >> test.py::test[column_group-hint_non_str_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_str_yson_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-insert--ForceBlocks] >> test.py::test[join-join_without_correlation_and_dict_access--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] >> test.py::test[tpch-q12-default.txt-Results] [GOOD] >> test.py::test[type_v3-bare_yson--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-bare_yson--Results] [SKIPPED] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] >> test.py::test[join-join_key_cmp_udf--ForceBlocks] [GOOD] >> test.py::test[join-join_key_cmp_udf--Results] >> test.py::test[join-join_and_distinct_key-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names--Results] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-partial_blocks1--ForceBlocks] >> test.py::test[bigdate-tz_table_rw--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_rw--Results] >> test.py::test[window-win_inline_spec-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_inline_spec-default.txt-Results] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-Results] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--ForceBlocks] >> test.py::test[blocks-top_sort_one_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_one_asc--Results] >> test.py::test[key_filter-dict_contains-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Results] >> test.py::test[action-discard-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job--Results] >> test.py::test[order_by-literal_take_zero_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--Results] >> test.py::test[expr-empty_iterator2--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] >> test.py::test[pg-tpcds-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Results] >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-off-ForceBlocks] >> test.py::test[bigdate-tz_table_rw--Results] [GOOD] >> test.py::test[binding-insert_binding--ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--ForceBlocks] >> test.py::test[join-premap_common_left_cross--Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-ForceBlocks] [GOOD] >> test.py::test[insert-override-from_sorted_desc-Results] >> test.py::test[select-optional_in_job--ForceBlocks] [GOOD] >> test.py::test[select-optional_in_job--Results] >> 
test.py::test[schema-select_all-row_spec_part-Results] [GOOD] >> test.py::test[schema-select_all_forceinferschema--Results] [SKIPPED] >> test.py::test[schema-select_all_inferschema_range_empty_fail--Results] >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-single-Results] [SKIPPED] >> test.py::test[column_group-hint-single-Results] [SKIPPED] >> test.py::test[column_group-hint_append--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--Results] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-Results] >> test.py::test[schema-user_schema_patch_columns--ForceBlocks] >> test.py::test[join-nopushdown_filter_with_depends_on--ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] >> test.py::test[order_by-literal_take_zero_sort--Results] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-ForceBlocks] >> test.py::test[blocks-top_sort_one_asc--Results] [GOOD] >> test.py::test[case-case_when_then-default.txt-ForceBlocks] >> test.py::test[join-join_key_cmp_udf--Results] [GOOD] >> test.py::test[join-left_all--ForceBlocks] >> test.py::test[pg-tpcds-q06-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[key_filter-dict_contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with-default.txt-ForceBlocks] >> test.py::test[produce-reduce_all_field_subset--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_field_subset--Results] >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-ForceBlocks] >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] >> test.py::test[window-win_inline_spec-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] >> test.py::test[produce-process_with_assume--ForceBlocks] >> test.py::test[result_types-containers-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-containers-default.txt-Results] >> test.py::test[join-join_and_distinct_key--Results] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by--Results] >> test.py::test[join-join_without_correlation_and_dict_access--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-ForceBlocks] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[join-premap_common_left_cross--Results] [GOOD] >> test.py::test[insert-override-from_sorted_desc-Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[window-current/aggregations--Results] >> test.py::test[action-action_eval_cluster_table_for--Results] [GOOD] >> test.py::test[action-eval_drop--Results] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[window-win_inline_spec-default.txt-Results] [GOOD] >> 
test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-ForceBlocks] >> test.py::test[schema-select_all_inferschema_range_empty_fail--Results] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-Results] >> test.py::test[select-optional_in_job--Results] [GOOD] >> test.py::test[select-where_cast-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] >> test.py::test[insert-override--ForceBlocks] [GOOD] >> test.py::test[insert-override--Results] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[insert-override-from_sorted_desc-Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Results] >> test.py::test[action-eval_folder_via_file_in_job--Results] [GOOD] >> test.py::test[action-eval_input_output_table--Results] >> test.py::test[type_v3-mergejoin_with_sort--ForceBlocks] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> test.py::test[result_types-containers-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-ForceBlocks] >> test.py::test[blocks-partial_blocks1--ForceBlocks] [GOOD] >> test.py::test[blocks-partial_blocks1--Results] >> test.py::test[join-nopushdown_filter_with_depends_on--Results] [GOOD] >> test.py::test[join-premap_map_semi--ForceBlocks] >> test.py::test[produce-reduce_all_field_subset--Results] [GOOD] >> test.py::test[produce-reduce_all_multi_in-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] [SKIPPED] >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[window-current/ansi_current--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current--Results] >> test.py::test[produce-reduce_multi_in_ref--ForceBlocks] |97.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] >> test.py::test[blocks-finalize_hashed_keys--Results] [GOOD] >> test.py::test[blocks-interval_mul--Results] >> test.py::test[insert-override--Results] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] >> test.py::test[join-lookupjoin_semi_subq-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off-ForceBlocks] >> test.py::test[binding-insert_binding--ForceBlocks] [GOOD] >> 
test.py::test[binding-insert_binding--Results] >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[blocks-string_len_and_cmp--ForceBlocks] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Results] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--Results] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] >> test.py::test[order_by-order_by_value_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] >> test.py::test[join-join_without_correlation_names--Results] [GOOD] >> test.py::test[join-left_join_null_column--Results] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--ForceBlocks] >> test.py::test[pg-wide_sort--Results] >> test.py::test[schema-user_schema_patch_columns--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] >> test.py::test[join-left_all--ForceBlocks] [GOOD] >> test.py::test[join-left_all--Results] >> test.py::test[key_filter-string_with-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with-default.txt-Results] >> test.py::test[action-eval_drop--Results] [GOOD] >> test.py::test[action-eval_for-default.txt-Results] >> test.py::test[aggr_factory-multi--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-multi--Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-ForceBlocks] >> test.py::test[case-case_when_then-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_when_then-default.txt-Results] >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> test.py::test[type_v3-split--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-split--Results] [SKIPPED] >> test.py::test[type_v3-uuid--ForceBlocks] >> test.py::test[produce-process_with_assume--ForceBlocks] [GOOD] >> test.py::test[produce-process_with_assume--Results] >> test.py::test[schema-select_with_map-partial_read_schema-Results] [GOOD] >> test.py::test[schema-skip_complex_type2--Results] >> test.py::test[pg-tpcds-q28-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Results] >> test.py::test[window-current/ansi_current--Results] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow--ForceBlocks] >> test.py::test[binding-insert_binding--Results] [GOOD] >> test.py::test[binding-table_concat_binding-default.txt-ForceBlocks] >> test.py::test[column_order-insert--ForceBlocks] [GOOD] >> test.py::test[column_order-insert--Results] >> test.py::test[order_by-literal_empty_list_sort--Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] >> test.py::test[join-join_semi_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [SKIPPED] >> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] >> test.py::test[pg-tpcds-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-Results] >> test.py::test[schema-user_schema_patch_columns--Results] [GOOD] >> test.py::test[select-host_count--ForceBlocks] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] [GOOD] 
>> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-ForceBlocks] >> test.py::test[join-join_without_correlation_and_dict_access-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--ForceBlocks] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_stream-empty-Results] >> test.py::test[select-where_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-where_cast-default.txt-Results] >> test.py::test[produce-process_with_assume--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[action-eval_input_output_table--Results] [GOOD] >> test.py::test[action-eval_typeof_output_table--Results] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[join-join_with_duplicate_keys_on_sorted-off-Results] [SKIPPED] >> test.py::test[case-case_when_then-default.txt-Results] [GOOD] >> test.py::test[column_group-hint-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint-single-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_append_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append_fail--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4--Results] [SKIPPED] >> test.py::test[column_order-align_publish--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt-Results] [GOOD] >> test.py::test[table_range-range_over_desc--Results] >> test.py::test[join-premap_map_semi--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_semi--Results] >> test.py::test[join-left_all--Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-ForceBlocks] >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> test.py::test[join-order_of_qualified--ForceBlocks] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[join-late_mergejoin_on_empty--Results] [SKIPPED] |97.2%| [TA] $(B)/ydb/tests/fq/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[sampling-bind_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] >> test.py::test[pg-tpcds-q15-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q28-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_tablerow_column--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] >> test.py::test[select-where_cast-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-ForceBlocks] >> test.py::test[column_order-insert--Results] [GOOD] >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_replace-default.txt-Results] >> test.py::test[action-eval_typeof_output_table--Results] [GOOD] >> test.py::test[action-insert_each_from_folder--Results] >> test.py::test[optimizers-group_visit_lambdas--Results] >> test.py::test[action-eval_for-default.txt-Results] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] >> test.py::test[aggr_factory-max_by-default.txt-ForceBlocks] >> test.py::test[join-premap_map_semi--Results] [GOOD] >> test.py::test[join-pullup_context_dep--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--ForceBlocks] >> test.py::test[pg-wide_sort--Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--ForceBlocks] >> test.py::test[produce-reduce_multi_in_ref--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Results] >> test.py::test[join-mapjoin_with_anonymous-off-ForceBlocks] [GOOD] >> test.py::test[schema-concat--ForceBlocks] >> test.py::test[join-mapjoin_with_anonymous-off-Results] [SKIPPED] >> test.py::test[join-premap_map_inner--ForceBlocks] >> test.py::test[file-parse_file_in_select_as_int--ForceBlocks] >> test.py::test[type_v3-uuid--ForceBlocks] [GOOD] >> test.py::test[blocks-string_len_and_cmp--ForceBlocks] [GOOD] >> test.py::test[type_v3-uuid--Results] >> test.py::test[blocks-string_len_and_cmp--Results] >> test.py::test[schema-skip_complex_type2--Results] [GOOD] >> test.py::test[schema-user_schema_missing_column--Results] >> test.py::test[blocks-interval_mul--Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] [GOOD] >> test.py::test[produce-process_with_python_stream-empty-Results] [GOOD] >> test.py::test[blocks-member--Results] >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[order_by-order_by_dynum-default.txt-Results] >> test.py::test[produce-reduce_all_field_subset--Results] >> test.py::test[flatten_by-flatten_with_group_by--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] >> test.py::test[insert-select_after_replace-default.txt-Results] [GOOD] >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-ForceBlocks] [GOOD] >> test.py::test[binding-table_concat_binding-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[aggregate-group_compact_sorted_distinct--Results] [GOOD] >> test.py::test[pg-tpch-q19-default.txt-Results] >> test.py::test[select-host_count--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[binding-table_concat_binding-default.txt-Results] >> test.py::test[aggregate-percentiles_containers--Results] >> test.py::test[select-host_count--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-ForceBlocks] |97.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part13/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[produce-process_multi_out_bad_count_fail--ForceBlocks] [GOOD] >> test.py::test[type_v3-uuid--Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] [GOOD] >> test.py::test[produce-reduce_with_python_filter_and_having--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] [SKIPPED] >> test.py::test[ql_filter-integer_many_left--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[join-left_join_null_column--Results] [GOOD] >> test.py::test[join-left_null_literal--Results] >> test.py::test[action-eval_column--ForceBlocks] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[insert-select_after_replace-default.txt-Results] [GOOD] >> test_ydb_backup.py::TestClusterBackup::test_cluster_backup >> test.py::test[pg-tpcds-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] [GOOD] >> test.py::test[order_by-singular-default.txt-Results] >> test.py::test[join-order_of_qualified--ForceBlocks] [GOOD] >> test.py::test[join-order_of_qualified--Results] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test.py::test[order_by-order_by_udf_duo--Results] >> test.py::test[order_by-singular-default.txt-Results] [SKIPPED] >> test.py::test[order_by-sort_decimals--Results] >> test.py::test[binding-table_concat_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint16--ForceBlocks] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--ForceBlocks] [SKIPPED] >> test.py::test[pg-tpcds-q68-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_having--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[pg-tpcds-q68-default.txt-Results] >> test.py::test[blocks-string_len_and_cmp--Results] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_base-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[column_order-align_publish--ForceBlocks] [GOOD] >> test.py::test[column_order-align_publish--Results] >> test.py::test[window-full/noncompact_with_tablerow--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_desc--Results] [GOOD] >> test.py::test[table_range-range_over_like--Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] >> test.py::test[window-full/noncompact_with_tablerow--Results] >> test.py::test[produce-reduce_multi_in_difftype--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] >> test.py::test[action-insert_each_from_folder--Results] [GOOD] >> test.py::test[action-subquery-default.txt-Results] >> test.py::test[schema-user_schema_missing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix2--Results] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by--Results] [GOOD] >> test.py::test[hor_join-out_sampling--ForceBlocks] >> test.py::test[pg-tpcds-q32-default.txt-ForceBlocks] >> test.py::test[schema-concat--ForceBlocks] [GOOD] >> test.py::test[schema-concat--Results] >> test.py::test[join-pullup_context_dep--ForceBlocks] [GOOD] >> test.py::test[join-pullup_context_dep--Results] >> test.py::test[select-host_count--Results] [GOOD] >> test.py::test[select-result_size_limit--ForceBlocks] [SKIPPED] >> test.py::test[select-result_size_limit--Results] [SKIPPED] >> test.py::test[select-scalar_subquery_with_star-default.txt-ForceBlocks] |97.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[pg-tpcds-q68-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-ForceBlocks] >> test.py::test[in-in_immediate_subquery-default.txt-Results] >> test.py::test[join-premap_map_inner--ForceBlocks] [GOOD] >> test.py::test[join-premap_map_inner--Results] >> test.py::test[order_by-order_by_dynum-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_int--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_int--Results] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-ForceBlocks] >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[column_order-align_publish--Results] [GOOD] >> test.py::test[optimizers-group_visit_lambdas--Results] [GOOD] >> test.py::test[blocks-minmax_strings--Results] >> test.py::test[column_order-select_action-default.txt-ForceBlocks] >> 
test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] >> test.py::test[union_all-mix_map_and_project--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--Results] >> test.py::test[schema-concat--Results] [GOOD] >> test.py::test[schema-insert-read_schema-ForceBlocks] >> test.py::test[join-left_null_literal--Results] [GOOD] >> test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-ForceBlocks] >> test.py::test[produce-reduce_all_field_subset--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test.py::test[aggr_factory-some-default.txt-ForceBlocks] >> test.py::test[produce-reduce_lambda_presort_twin_list--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps--Results] [SKIPPED] >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> test.py::test[schema-append_to_desc_with_remap--ForceBlocks] >> test.py::test[join-left_trivial-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi--Results] >> test.py::test[ql_filter-integer_many_left--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_left--Results] >> test.py::test[result_types-pg-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_row_dispatcher.py::TestPqRowDispatcher::test_group_by_hop_restart_node [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=3699257) is multi-threaded, use of fork() may lead to deadlocks in the child. 
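
The DeprecationWarning captured above from test_group_by_hop_restart_node is emitted by recent CPython (3.12+) when os.fork() runs in a process that already has other live threads, which is what multiprocessing's default "fork" start method does on Linux (the warning is attributed to popen_fork.py because that is where os.fork() is called). The sketch below is a generic, minimal illustration of that condition and of the textbook workaround, requesting the "spawn" start method; all names in it are placeholders and it is not taken from the YDB test harness or this run.

    # Minimal illustrative sketch (assumption: generic CPython behavior, not YDB code).
    # A background thread makes the parent multi-threaded; forking it is what the
    # DeprecationWarning in multiprocessing/popen_fork.py complains about, so the
    # child here is started with the "spawn" context instead of the default fork.
    import multiprocessing as mp
    import threading
    import time


    def worker(name: str) -> None:
        # Runs in a freshly spawned interpreter, so it inherits no parent threads/locks.
        print(f"child {name}: running in pid {mp.current_process().pid}")


    def main() -> None:
        # Keep an extra thread alive so the parent process is multi-threaded,
        # mirroring the situation reported in the captured warning above.
        keep_alive = threading.Thread(target=time.sleep, args=(5,), daemon=True)
        keep_alive.start()

        # "spawn" starts a new interpreter rather than forking the current one,
        # which sidesteps the fork-with-threads deadlock risk at the cost of
        # slower child startup and a picklable-target requirement.
        ctx = mp.get_context("spawn")
        p = ctx.Process(target=worker, args=("spawn-child",))
        p.start()
        p.join()


    if __name__ == "__main__":
        main()

Whether a particular test harness can switch start methods is a separate question (spawn re-imports the worker module and needs picklable targets); the sketch only shows the condition behind the warning and the usual remedy, not a change proposed for this suite.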
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] >> test.py::test[aggregate-group_by_session_aliases--Results] >> test.py::test[file-parse_file_in_select_as_int--Results] [GOOD] >> test.py::test[flatten_by-flatten_and_where--ForceBlocks] >> test.py::test[window-full/noncompact_with_tablerow--Results] [GOOD] >> test.py::test[window-full/session_aliases_compact--ForceBlocks] >> test.py::test[table_range-range_over_like--Results] [GOOD] >> test.py::test[type_v3-append_diff_layout2--Results] [SKIPPED] >> test.py::test[aggr_factory-max_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys-off-ForceBlocks] >> test.py::test[join-premap_map_inner--Results] [GOOD] >> test.py::test[join-premap_map_inner-off-ForceBlocks] >> test.py::test[blocks-add_uint16--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-max_by-default.txt-Results] >> test.py::test[pg-tpch-q19-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint16--Results] >> test.py::test[produce-process_with_lambda-default.txt-ForceBlocks] >> test.py::test[join-pullup_context_dep--Results] [GOOD] >> test.py::test[schema-user_schema_mix2--Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[join-simple_columns_partial-off-ForceBlocks] >> test.py::test[order_by-yql-19598--Results] >> test.py::test[select-create_tuples-default.txt-Results] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] >> test.py::test[action-runtime_if_select-default.txt-Results] >> test.py::test[ql_filter-integer_many_left--Results] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-sort_decimals--Results] [GOOD] >> test.py::test[action-subquery-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--ForceBlocks] >> test.py::test[blocks-add_uint16--Results] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--ForceBlocks] >> test.py::test[aggr_factory-booland-default.txt-Results] >> test.py::test[order_by-sort_with_take--Results] >> test.py::test[pg-tpcds-q82-default.txt-Results] >> test.py::test[blocks-block_output_various_types--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_output_various_types--Results] [SKIPPED] >> test.py::test[blocks-coalesce_ints--ForceBlocks] >> test.py::test[aggregate-group_by_session_aliases--Results] [GOOD] >> 
test.py::test[aggregate-group_by_session_extended--ForceBlocks] >> test.py::test[select-scalar_subquery_with_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] [GOOD] |97.3%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[count-count--ForceBlocks] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt-Results] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-ForceBlocks] >> test.py::test[key_filter-contains_tuples-default.txt-Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] >> test.py::test[pg-tpcds-q32-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q36-default.txt-ForceBlocks] >> test.py::test[aggr_factory-max_by-default.txt-Results] [GOOD] >> test.py::test[result_types-pg-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] >> test.py::test[aggr_factory-median-default.txt-ForceBlocks] >> test.py::test[column_order-select_action-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_action-default.txt-Results] >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[insert-merge_publish--ForceBlocks] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt-Results] [GOOD] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_project--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-ForceBlocks] >> test.py::test[union_all-mix_map_and_project--Results] >> test.py::test[table_range-range_over_regexp--ForceBlocks] >> test.py::test[select-create_tuples-default.txt-Results] [GOOD] >> test.py::test[select-discard-default.txt-Results] >> test.py::test[produce-process_with_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_lambda-default.txt-Results] >> test.py::test[schema-append_to_desc_with_remap--ForceBlocks] [GOOD] >> test.py::test[schema-append_to_desc_with_remap--Results] >> 
test.py::test[schema-select_all-row_spec_diff_sort_desc-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] >> test.py::test[order_by-yql-19598--Results] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[schema-insert-read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-insert-read_schema-Results] >> test.py::test[join-mapjoin_partial_uniq_keys-off-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] >> test.py::test[join-mergejoin_force_align2-off-ForceBlocks] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[join-premap_map_inner-off-ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test.py::test[join-premap_map_inner-off-Results] [SKIPPED] >> test.py::test[aggregate-percentiles_grouped--Results] >> test.py::test[join-lookupjoin_semi--Results] [GOOD] >> test.py::test[hor_join-out_sampling--ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off-ForceBlocks] >> test.py::test[join-mapjoin_with_anonymous--Results] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[flatten_by-flatten_and_where--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_and_where--Results] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--Results] [SKIPPED] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] >> test.py::test[join-mergejoin_force_no_sorted-off-ForceBlocks] >> test.py::test[order_by-sort_with_take--Results] [GOOD] >> test.py::test[order_by-sort_with_take_limit--Results] >> test.py::test[action-runtime_if_select-default.txt-Results] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] >> test.py::test[column_order-select_action-default.txt-Results] [GOOD] >> test.py::test[column_order-select_sample-default.txt-ForceBlocks] >> test.py::test[aggr_factory-some-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-some-default.txt-Results] >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[join-pullup_extra_columns--ForceBlocks] >> test.py::test[schema-select_all-row_spec_diff_sort_desc-Results] [GOOD] >> test.py::test[blocks-string_filter--Results] >> test.py::test[schema-user_schema_no_infer--ForceBlocks] >> test.py::test[produce-process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-ForceBlocks] >> test.py::test[schema-append_to_desc_with_remap--Results] [GOOD] >> test.py::test[schema-def_values_job--ForceBlocks] >> test.py::test[union_all-mix_map_and_project--Results] [GOOD] >> test.py::test[view-secure_eval_dyn--ForceBlocks] >> test.py::test[join-simple_columns_partial-off-ForceBlocks] [GOOD] >> test.py::test[join-simple_columns_partial-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key-off-ForceBlocks] >> test.py::test[ql_filter-integer_single_disable_prune--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] |97.3%| [TA] {RESULT} $(B)/ydb/tests/fq/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[action-eval_column--ForceBlocks] [GOOD] >> test.py::test[action-eval_column--Results] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[aggregate-group_by_session_aliases--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> test.py::test[optimizers-sort_by_nonstrict_const--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window--Results] >> test.py::test[blocks-coalesce_ints--ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_ints--Results] >> test.py::test[case-case_val_then_else-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Results] >> test.py::test[flatten_by-flatten_and_where--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[sampling-join_left_sample-default.txt-ForceBlocks] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] >> test.py::test[view-secure_eval_dyn--ForceBlocks] [GOOD] >> test.py::test[view-secure_eval_dyn--Results] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--ForceBlocks] >> test.py::test[window-full/session_aliases_compact--ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--Results] >> test.py::test[window-full/session_aliases_compact--Results] >> test.py::test[aggr_factory-some-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-ForceBlocks] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[case-case_val_then_else-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-perusage-ForceBlocks] >> test.py::test[column_group-groups-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[table_range-range_over_regexp--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_regexp--Results] >> test.py::test[blocks-coalesce_ints--Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--ForceBlocks] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] >> test.py::test[aggr_factory-booland-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-histogram-default.txt-Results] >> test.py::test[aggregate-group_by_session_extended--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Results] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [GOOD] >> test.py::test[expr-langver--Results] >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-decimal--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] >> 
test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Results] >> test.py::test[schema-user_schema_no_infer--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_no_infer--Results] >> test.py::test[join-mergejoin_force_one_sorted--Results] |97.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[order_by-sort_with_take_limit--Results] [GOOD] >> test.py::test[pg-all_data--Results] >> test.py::test[column_order-select_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_sample-default.txt-Results] >> test.py::test[join-mergejoin_force_no_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_per_link--Results] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column--ForceBlocks] >> test.py::test[produce-process_with_python_as_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] >> test.py::test[join-premap_nonseq_flatmap-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off-Results] [SKIPPED] >> test.py::test[join-yql-8125-off-ForceBlocks] >> test.py::test[table_range-range_over_regexp--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail--Results] [GOOD] >> test.py::test[view-file_inner_library--Results] >> test.py::test[tpch-q6-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail--ForceBlocks] >> test.py::test[join-pullup_extra_columns--ForceBlocks] [GOOD] >> test.py::test[join-pullup_extra_columns--Results] >> test.py::test[join-mapjoin_with_anonymous--Results] [GOOD] >> test.py::test[blocks-string_filter--Results] [GOOD] >> test.py::test[column_group-hint_append2--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail2--Results] >> test.py::test[insert-merge_publish--ForceBlocks] [GOOD] >> test.py::test[insert-merge_publish--Results] >> test.py::test[schema-def_values_job--ForceBlocks] [GOOD] >> test.py::test[schema-def_values_job--Results] >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-premap_common_cross--Results] >> test.py::test[column_group-hint_diff_grp_fail2--Results] [SKIPPED] >> test.py::test[column_group-publish-single-Results] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] >> test.py::test[aggr_factory-median-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] >> test.py::test[pg-tpcds-q36-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-Results] >> test.py::test[join-mergejoin_force_align2-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align2-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on-off-ForceBlocks] |97.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> 
test.py::test[join-split_to_list_as_key-off-ForceBlocks] [GOOD] >> test.py::test[join-split_to_list_as_key-off-Results] [SKIPPED] >> test.py::test[key_filter-convert--ForceBlocks] >> test.py::test[schema-user_schema_no_infer--Results] [GOOD] >> test.py::test[select-literal_negative-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[window-full/session_aliases_compact--Results] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-ForceBlocks] >> test.py::test[count-count--ForceBlocks] [GOOD] >> test.py::test[count-count--Results] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin_list--Results] >> test.py::test[action-eval_anon_table--Results] >> test.py::test[column_order-select_sample-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin_list--Results] [SKIPPED] >> test.py::test[sampling-map--ForceBlocks] >> test.py::test[dq-mem_limit--ForceBlocks] [SKIPPED] >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] >> test.py::test[sampling-join_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-join_left_sample-default.txt-Results] >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-evaluate_queries--ForceBlocks] >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[window-full/aggregations--Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables--ForceBlocks] [SKIPPED] >> test.py::test[table_range-concat_sorted_max_sorted_tables--Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--ForceBlocks] >> test.py::test[expr-langver--Results] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Results] >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[schema-select_all-row_spec-ForceBlocks] >> test.py::test[pg-tpcds-q86-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-ForceBlocks] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_presort--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row--Results] [SKIPPED] >> test.py::test[ql_filter-integer_optional_null--Results] >> test.py::test[table_range-each_with_non_existing_all_fail--ForceBlocks] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-ForceBlocks] >> test.py::test[join-pullup_extra_columns--Results] [GOOD] >> test.py::test[join-pullup_null_column--ForceBlocks] |97.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[insert-merge_publish--Results] [GOOD] >> test.py::test[insert-two_input_tables--ForceBlocks] >> test.py::test[weak_field-hor_join_with_mix_weak_access--ForceBlocks] [GOOD] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] >> 
test.py::test[action-subquery_opt_args-default.txt-Results] [GOOD] |97.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] [SKIPPED] >> test.py::test[join-starjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-yql-8980-off-Results] [SKIPPED] >> test.py::test[join-yql_465-off-Results] >> test.py::test[agg_apply-table--Results] >> test.py::test[join-yql_465-off-Results] [SKIPPED] >> test.py::test[key_filter-contains_optional--Results] >> test.py::test[blocks-minmax_strings--Results] [GOOD] >> test.py::test[blocks-minmax_tuple--Results] >> test.py::test[view-file_inner_library--Results] [GOOD] >> test.py::test[view-view_with_library--Results] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] >> test.py::test[optimizers-unused_columns_window--Results] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] >> test.py::test[blocks-combine_all_avg_filter_opt--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] >> test.py::test[optimizers-yql-6133_skip_deps--Results] [SKIPPED] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] >> test.py::test[pg-tpcds-q36-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-ForceBlocks] >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> test.py::test[hor_join-filters--Results] >> test.py::test[order_by-SortByTwoFields--ForceBlocks] >> test.py::test[join-mergejoin_left_null_column--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-ForceBlocks] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-ForceBlocks] >> test.py::test[weak_field-hor_join_with_mix_weak_access--Results] [GOOD] >> test.py::test[window-distinct_over_window--ForceBlocks] >> test.py::test[join-mergejoin_left_null_column--Results] >> test.py::test[column_order-insert_reorder_without_columnorder--Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-Results] >> test.py::test[join-mergejoin_force_one_sorted--Results] [GOOD] >> test.py::test[join-mergejoin_left_null_column--Results] >> test.py::test[aggregate-percentiles_grouped--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--Results] >> test.py::test[select-literal_negative-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_negative-default.txt-Results] >> test.py::test[aggr_factory-histogram-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--Results] >> test.py::test[tpch-q6-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Results] |97.4%| [TA] $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[join-nopushdown_filter_with_depends_on-off-ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi-off-ForceBlocks] >> test.py::test[action-eval_anon_table--Results] [GOOD] >> test.py::test[action-eval_folder_via_file--Results] >> test.py::test[key_filter-convert--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_list--ForceBlocks] >> test.py::test[blocks-combine_all_avg_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter--ForceBlocks] >> test.py::test[action-evaluate_queries--ForceBlocks] [GOOD] >> test.py::test[action-evaluate_queries--Results] >> test.py::test[ql_filter-integer_optional_null--Results] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] >> test.py::test[type_v3-append_diff_layout1--Results] >> test.py::test[table_range-each_with_non_existing--ForceBlocks] [GOOD] >> test.py::test[sampling-map--ForceBlocks] [GOOD] >> test.py::test[sampling-map--Results] >> test.py::test[table_range-each_with_non_existing--Results] >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Results] >> test.py::test[aggregate-group_by_session_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] >> test.py::test[join-pullup_null_column--ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> test.py::test[pg-tpcds-q95-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_negative-default.txt-Results] [GOOD] >> test.py::test[select-optional_pull--ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_filter--Results] >> test.py::test[type_v3-ignore_v3_hint-protofield-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] >> test.py::test[pg-tpcds-q95-default.txt-Results] |97.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[key_filter-convert--ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec-Results] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt--ForceBlocks] >> test.py::test[agg_apply-table--Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Results] >> test.py::test[join-mergejoin_left_null_column--Results] [GOOD] >> test.py::test[join-mergejoin_left_null_column-off-ForceBlocks] >> test.py::test[key_filter-contains_optional--Results] [GOOD] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[bigdate-tz_table_fill--ForceBlocks] >> test.py::test[join-yql-8125-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-8125-off-Results] [SKIPPED] >> test.py::test[key_filter-contains-default.txt-ForceBlocks] >> test.py::test[action-evaluate_queries--Results] [GOOD] 
>> test.py::test[action-unwrap_runtime_fail_with_column_message--ForceBlocks] >> test.py::test[view-view_with_library--Results] [GOOD] >> test.py::test[weak_field-weak_field_num_access--Results] >> test.py::test[key_filter-decimal--ForceBlocks] [GOOD] >> test.py::test[key_filter-decimal--Results] >> test.py::test[insert-two_input_tables--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[insert-two_input_tables--Results] >> test.py::test[table_range-each_with_non_existing--Results] [GOOD] >> test.py::test[count-count--Results] [GOOD] >> test.py::test[table_range-range_over_filter--ForceBlocks] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] >> test.py::test[blocks-pg_top_sort--ForceBlocks] |97.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-Results] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] >> test_ydb_backup.py::TestClusterBackup::test_cluster_backup [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] [GOOD] >> test.py::test[sampling-map--Results] [GOOD] >> test.py::test[sampling-reduce-with_premap-ForceBlocks] [SKIPPED] >> test.py::test[sampling-reduce-with_premap-Results] [SKIPPED] >> test.py::test[sampling-yql-14664_deps-default.txt-ForceBlocks] >> test.py::test[schema-select_all-row_spec-Results] [GOOD] >> test.py::test[select-column_labels-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] |97.4%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part13/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[view-init_view_after_eval-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q13-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] >> test.py::test[hor_join-filters--Results] [GOOD] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[order_by-SortByTwoFields--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] >> test.py::test[action-eval_folder_via_file--Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[action-eval_pragma--Results] >> test.py::test[action-unwrap_runtime_fail_with_column_message--ForceBlocks] [GOOD] >> test.py::test[action-unwrap_runtime_fail_with_column_message--Results] [GOOD] >> test.py::test[agg_apply-table--ForceBlocks] >> test.py::test[select-discard-default.txt-Results] [GOOD] >> test.py::test[select-dot_in_alias-default.txt-Results] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-ForceBlocks] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-ForceBlocks] >> test.py::test[type_v3-append_diff_layout1--Results] [GOOD] >> test.py::test[udf-udaf_distinct--ForceBlocks] >> test.py::test[window-presort_window_partition_by_table-default.txt-Results] [GOOD] >> test.py::test[window-rank/unordered--ForceBlocks] >> test.py::test[action-eval_anon_table--ForceBlocks] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] >> test.py::test[join-pullup_inner--ForceBlocks] >> test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] >> test.py::test[join-premap_map_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_map_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2-off-ForceBlocks] |97.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] [GOOD] >> test.py::test[order_by-assume_over_input--ForceBlocks] >> test.py::test[blocks-combine_all_max_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_max_filter--Results] >> test.py::test[key_filter-decimal--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[flatten_by-flatten_list--ForceBlocks] [GOOD] >> test.py::test[aggregate-GroupByTwoFields--Results] [GOOD] >> test.py::test[flatten_by-flatten_list--Results] >> 
test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] >> test.py::test[lineage-select_mix_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_no_aggregate_split--ForceBlocks] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--Results] >> test.py::test[type_v3-decimal_yt--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt--Results] |97.4%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part5/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--ForceBlocks] >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[bigdate-tz_table_fill--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_fill--Results] >> test.py::test[join-mergejoin_left_null_column--Results] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> test.py::test[join-mergejoin_left_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_left_null_column-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested--ForceBlocks] >> test.py::test[weak_field-weak_field_num_access--Results] [GOOD] >> test.py::test[window-current/session_aliases--Results] >> test.py::test[select-optional_pull--ForceBlocks] [GOOD] >> test.py::test[select-optional_pull--Results] >> test.py::test[blocks-pg_top_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] >> test.py::test[key_filter-contains-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-contains-default.txt-Results] >> test.py::test[blocks-combine_all_max_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_some--ForceBlocks] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[schema-limit_directread--ForceBlocks] >> test.py::test[action-eval_pragma--Results] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Results] >> test.py::test[table_range-range_over_filter--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_filter--Results] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-ForceBlocks] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] |97.4%| [TA] {RESULT} $(B)/ydb/tests/fq/yds/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test.py::test[flatten_by-flatten_list--Results] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--ForceBlocks] >> test.py::test[type_v3-decimal_yt--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-ForceBlocks] >> test.py::test[view-init_view_after_eval-default.txt-ForceBlocks] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-Results] >> test.py::test[select-column_labels-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] >> test.py::test[join-premap_common_inner_filter--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] |97.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[hor_join-row_num_per_sect--ForceBlocks] >> test.py::test[join-pullup_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners-off-ForceBlocks] >> test.py::test[sampling-yql-14664_deps-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt-Results] [GOOD] >> test.py::test[key_filter-mixed_opt_bounds--Results] [SKIPPED] >> test.py::test[key_filter-multiusage--Results] >> test.py::test[select-dot_in_alias-default.txt-Results] [GOOD] >> test.py::test[select-from_in_front-default.txt-Results] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Results] [SKIPPED] >> test.py::test[dq-precompute_asyncfile--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel_indep--Results] [SKIPPED] >> test.py::test[dq-precompute_tree-default.txt-Results] [SKIPPED] >> test.py::test[expr-double_join_with_list_from_range--Results] >> test.py::test[window-distinct_over_window--ForceBlocks] [GOOD] >> test.py::test[window-distinct_over_window--Results] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] >> test.py::test[key_filter-contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-datetime-default.txt-ForceBlocks] >> test.py::test[agg_apply-table--ForceBlocks] [GOOD] >> test.py::test[agg_apply-table--Results] >> test.py::test[blocks-minmax_tuple--Results] [GOOD] >> test.py::test[blocks-not--Results] >> test.py::test[select-optional_pull--Results] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-ForceBlocks] >> test.py::test[action-eval_anon_table--ForceBlocks] [GOOD] >> test.py::test[action-eval_anon_table--Results] >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[select-struct_members-default.txt-ForceBlocks] >> test.py::test[join-pullup_inner--ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner--Results] |97.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[bigdate-tz_table_fill--Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-Results] >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] >> test.py::test[order_by-assume_over_input--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_over_input--Results] >> 
test.py::test[insert_monotonic-several2-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-Results] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[schema-copy-read_schema-Results] >> test.py::test[view-init_view_after_eval-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--ForceBlocks] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] >> test.py::test[window-full/aggregations--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Results] >> test.py::test[udf-udaf_distinct--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--Results] [GOOD] >> test.py::test[udf-udaf_distinct--Results] >> test.py::test[order_by-assume_over_input_desc--Results] [SKIPPED] >> test.py::test[order_by-limit--Results] >> test.py::test[window-rank/unordered--ForceBlocks] [GOOD] >> test.py::test[window-rank/unordered--Results] >> test.py::test[join-premap_merge_extrasort2-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort2-off-Results] [SKIPPED] >> test.py::test[join-pullup_context_dep-off-ForceBlocks] >> test.py::test[aggregate-percentiles_grouped_expr--Results] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] |97.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] >> test.py::test[action-eval_anon_table--Results] [GOOD] >> test.py::test[action-eval_folder_via_file--ForceBlocks] >> test.py::test[action-subquery_merge_nested_subquery--Results] [GOOD] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] >> test.py::test[schema-fake_column-default.txt-ForceBlocks] >> test.py::test[agg_apply-table--Results] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-ForceBlocks] >> test.py::test[action-table_content_before_from_folder--Results] >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--Results] >> test.py::test[insert_monotonic-several2-default.txt-Results] [GOOD] >> test.py::test[join-compact_join--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nested--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] >> test.py::test[order_by-assume_over_input--Results] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--ForceBlocks] >> test.py::test[optimizers-test_no_aggregate_split--ForceBlocks] [GOOD] >> test.py::test[optimizers-test_no_aggregate_split--Results] >> test.py::test[blocks-combine_hashed_some--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_some--Results] >> test.py::test[schema-limit_directread--ForceBlocks] [GOOD] >> test.py::test[schema-limit_directread--Results] >> test.py::test[join-pullup_inner--Results] [GOOD] >> test.py::test[join-three_equalities--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_simple--Results] >> test.py::test[aggregate-aggregation_and_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] |97.5%| [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[pg-tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] >> test.py::test[window-distinct_over_window--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--ForceBlocks] >> test.py::test[select-from_in_front-default.txt-Results] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] >> test.py::test[aggregate-aggregation_by_udf--Results] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-ForceBlocks] >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] >> test.py::test[udf-udaf_distinct--Results] [GOOD] >> test.py::test[view-file_inner--ForceBlocks] >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[blocks-pg_to_numbers--Results] |97.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part5/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[flatten_by-flatten_two_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] [SKIPPED] >> test.py::test[sampling-subquery_sort-default.txt-Results] >> test.py::test[window-rank/unordered--Results] [GOOD] >> test.py::test[window-win_over_few_partitions--ForceBlocks] >> test.py::test[select-select_all_filtered-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] >> test.py::test[schema-limit_directread--Results] [GOOD] >> test.py::test[schema-remap_desc--ForceBlocks] >> test.py::test[hor_join-row_num_per_sect--ForceBlocks] [GOOD] >> test.py::test[hor_join-row_num_per_sect--Results] >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test.py::test[order_by-literal_with_assume--Results] [GOOD] >> test.py::test[order_by-warn_offset_wo_sort--Results] >> test.py::test[select-struct_members-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-struct_members-default.txt-Results] >> test.py::test[blocks-combine_hashed_some--Results] [GOOD] >> test.py::test[blocks-compare_dates_floats_bools--ForceBlocks] [SKIPPED] >> test.py::test[blocks-compare_dates_floats_bools--Results] [SKIPPED] >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt-Results] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[weak_field-weak_field_long_fields--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nested--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-ForceBlocks] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test.py::test[blocks-string_as_agg_key--Results] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-group_by_gs_alt_duo--ForceBlocks] >> test.py::test[join-star_join_inners-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] >> test.py::test[optimizers-test_no_aggregate_split--Results] [GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[select-select_all_filtered-default.txt-Results] [GOOD] >> test.py::test[select-select_all_group_by_column--ForceBlocks] >> test.py::test[order_by-limit--Results] [GOOD] >> test.py::test[order_by-sort_simple--Results] >> test.py::test[join-pullup_context_dep-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_context_dep-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key--ForceBlocks] |97.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[blocks-compare_dates_floats_bools--Results] [SKIPPED] >> test.py::test[schema-fake_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-fake_column-default.txt-Results] >> test.py::test[action-table_content_before_from_folder--Results] [GOOD] >> test.py::test[aggr_factory-boolor-default.txt-Results] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] [GOOD] >> test.py::test[join-premap_common_left_cross--Results] >> test.py::test[select-struct_members-default.txt-Results] [GOOD] >> test.py::test[select-where_not_null--ForceBlocks] >> test.py::test[action-eval_folder_via_file--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--ForceBlocks] >> test.py::test[key_filter-multiusage--Results] [GOOD] >> test.py::test[key_filter-nile_pred--Results] >> test.py::test[window-current/session_aliases--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] >> test.py::test[action-eval_folder_via_file--Results] >> test.py::test[join-compact_join--ForceBlocks] [GOOD] >> test.py::test[join-compact_join--Results] >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-ForceBlocks] |97.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[hor_join-row_num_per_sect--Results] [GOOD] >> test.py::test[join-filter_joined-off-ForceBlocks] >> test.py::test[order_by-extract_members_over_sort_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] >> test.py::test[aggr_factory-min_by-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] >> test.py::test[view-file_inner--ForceBlocks] [GOOD] >> test.py::test[view-file_inner--Results] >> test.py::test[select-exists_with_table-default.txt-ForceBlocks] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-read_schema-ForceBlocks] >> test.py::test[join-three_equalities--ForceBlocks] [GOOD] >> test.py::test[join-three_equalities--Results] >> test.py::test[aggr_factory-bottom_by-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[aggr_factory-bottom_by-default.txt-Results] >> test.py::test[join-premap_common_multiparents--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap--Results] >> test.py::test[action-eval_folder_via_file--Results] [GOOD] >> test.py::test[action-eval_regexp--ForceBlocks] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] >> test.py::test[sampling-subquery_sort-default.txt-Results] [GOOD] >> test.py::test[schema-insert-read_schema-Results] >> test.py::test[blocks-pg_to_dates--ForceBlocks] >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[blocks-string_filter--ForceBlocks] >> test.py::test[order_by-warn_offset_wo_sort--Results] [GOOD] >> test.py::test[pg-join_using_tables4-default.txt-Results] >> test.py::test[blocks-pg_to_numbers--Results] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] >> test.py::test[aggregate-avg_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-Results] >> test.py::test[schema-remap_desc--ForceBlocks] [GOOD] >> test.py::test[schema-remap_desc--Results] >> test.py::test[expr-double_join_with_list_from_range--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--Results] >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[schema-other--Results] [SKIPPED] >> test.py::test[schema-select_field-row_spec-Results] >> test.py::test[view-file_inner--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--ForceBlocks] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] >> test.py::test[select-qualified_all_and_group_by-default.txt-Results] [GOOD] >> test.py::test[select-result_size_limit_with_fill--Results] [SKIPPED] >> test.py::test[select-simple_struct_field_access--Results] >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[order_by-literal_desc--ForceBlocks] >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--Results] >> test.py::test[join-compact_join--Results] [GOOD] >> test.py::test[join-inner_grouped--ForceBlocks] >> test.py::test[select-select_all_group_by_column--ForceBlocks] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] >> test.py::test[window-full/noncompact_with_nulls--ForceBlocks] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Results] >> test.py::test[aggregate-group_by_gs_alt_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] >> test.py::test[aggregate-aggregate_distinct_list-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off-Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn-off-ForceBlocks] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--ForceBlocks] >> test.py::test[aggregate-avg_interval-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-count_distinct_with_filter--ForceBlocks] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-on-Results] [SKIPPED] >> test.py::test[binding-anon_table_binding-default.txt-Results] >> test.py::test[aggr_factory-stddev-default.txt-ForceBlocks] >> test.py::test[window-win_over_few_partitions--ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions--Results] >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[join-three_equalities--Results] [GOOD] >> test.py::test[join-yql-19081--ForceBlocks] [SKIPPED] >> test.py::test[window-full/session_incompat_sort--Results] >> test.py::test[join-yql-19081--Results] [SKIPPED] >> test.py::test[json-json_exists/example--ForceBlocks] >> test.py::test[join-split_to_list_as_key--ForceBlocks] [GOOD] >> test.py::test[join-split_to_list_as_key--Results] >> test.py::test[key_filter-nile_pred--Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-Results] >> test.py::test[key_filter-range_union--Results] >> test.py::test[select-where_not_null--ForceBlocks] [GOOD] >> test.py::test[select-where_not_null--Results] |97.5%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part0/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] >> test.py::test[order_by-sort_simple--Results] [GOOD] >> test.py::test[params-complex_yson--Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[join-filter_joined-off-ForceBlocks] [GOOD] >> test.py::test[join-filter_joined-off-Results] [SKIPPED] >> test.py::test[join-from_in_front_join-off-ForceBlocks] >> test.py::test[select-exists_with_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_with_table-default.txt-Results] >> test.py::test[select-select_all_group_by_column--Results] [GOOD] >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> test.py::test[select-shift_columns-default.txt-ForceBlocks] >> test.py::test[schema-other_job--Results] [SKIPPED] >> test.py::test[schema-patchtype--Results] >> test.py::test[schema-select_all-read_schema-ForceBlocks] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[schema-select_all-read_schema-Results] >> test.py::test[select-simple_struct_field_access--Results] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-Results] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--ForceBlocks] >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-after_group_by-default.txt-ForceBlocks] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] >> test.py::test[aggregate-group_by_hop_star--ForceBlocks] >> test.py::test[blocks-pg_to_dates--ForceBlocks] [GOOD] >> 
test.py::test[blocks-pg_to_dates--Results] >> test.py::test[blocks-string_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-string_filter--Results] >> test.py::test[select-where_not_null--Results] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-ForceBlocks] >> test.py::test[action-eval_regexp--ForceBlocks] [GOOD] >> test.py::test[action-eval_regexp--Results] >> test.py::test[aggr_factory-boolor-default.txt-Results] [GOOD] >> test.py::test[select-exists_with_table-default.txt-Results] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-ForceBlocks] >> test.py::test[aggr_factory-max_by-default.txt-Results] >> test.py::test[order_by-literal_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_desc--Results] >> test.py::test[schema-select_all-read_schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema2--ForceBlocks] |97.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[join-left_semi_with_other-off-ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-sort_two_asc--Results] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] >> test.py::test[window-win_over_few_partitions--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[join-yql-8125--ForceBlocks] >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[join-star_join-off-ForceBlocks] >> test.py::test[window-full/aggregations_leadlag_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Results] >> test.py::test[ypath-multi_range-default.txt-ForceBlocks] >> test.py::test[window-full/noncompact_with_nulls--Results] [GOOD] >> test.py::test[window-leading/aggregations--ForceBlocks] |97.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-pullup_context_dep--Results] >> test.py::test[join-inner_grouped--ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped--Results] >> test.py::test[action-eval_regexp--Results] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-ForceBlocks] >> test.py::test[schema-select_all_inferschema_op--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] >> test.py::test[binding-anon_table_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_decimal--Results] >> test.py::test[blocks-pg_to_dates--Results] [GOOD] >> test.py::test[blocks-sort_two_mix--ForceBlocks] >> test.py::test[blocks-string_filter--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] >> test.py::test[order_by-literal_desc--Results] [GOOD] >> test.py::test[order_by-native_desc_assume_with_transform--ForceBlocks] >> test.py::test[order_by-native_desc_assume_with_transform--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_assume_with_transform--Results] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc--ForceBlocks] [SKIPPED] >> 
test.py::test[order_by-native_desc_sort_calc--Results] [SKIPPED] >> test.py::test[order_by-order_by_dynum_desc-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_simple--Results] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Results] [SKIPPED] >> test.py::test[aggregate-percentiles_ungrouped--Results] >> test.py::test[params-complex_yson--Results] [GOOD] >> test.py::test[pg-insert--Results] >> test.py::test[join-no_empty_join_for_dyn-off-ForceBlocks] [GOOD] >> test.py::test[join-no_empty_join_for_dyn-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner-off-ForceBlocks] >> test.py::test[aggregate-native_desc_group_compact_by--Results] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] >> test.py::test[join-premap_common_left_cross--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off-Results] [SKIPPED] >> test.py::test[join-star_join-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] [SKIPPED] >> test.py::test[join-strict_keys--Results] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda--Results] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--ForceBlocks] >> test.py::test[aggregate-count_distinct_with_filter--ForceBlocks] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Results] >> test.py::test[select-shift_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-shift_columns-default.txt-Results] |97.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part0/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_pull--Results] >> test.py::test[select-trivial_order_by-default.txt-Results] [GOOD] >> test.py::test[select-type_assert-default.txt-Results] >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_append--Results] >> test.py::test[pg-join_using_tables4-default.txt-Results] [GOOD] >> test.py::test[pg-nulls_native-default.txt-Results] >> test.py::test[schema-patchtype--Results] [GOOD] >> test.py::test[schema-remap_desc--Results] >> test.py::test[join-from_in_front_join-off-ForceBlocks] [GOOD] >> test.py::test[join-from_in_front_join-off-Results] [SKIPPED] >> test.py::test[join-grace_join1-off-ForceBlocks] >> test_ydb_backup.py::TestDatabaseBackup::test_database_backup >> test.py::test[aggregate-group_by_hop_star--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_star--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_aggr_expr--ForceBlocks] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[join-inner_grouped--Results] [GOOD] >> test.py::test[join-join_cbo_3_tables--ForceBlocks] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_one_field--Results] >> test.py::test[insert-after_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-after_group_by-default.txt-Results] >> test.py::test[window-full/aggregations_compact--Results] [GOOD] >> test.py::test[window-full/session_aliases_compact--Results] >> test.py::test[select-shift_columns-default.txt-Results] [GOOD] >> test.py::test[select-table_content_with_tmp_folder--ForceBlocks] >> test.py::test[bigdate-tz_table_pull--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--Results] >> test.py::test[join-strict_keys--Results] [GOOD] >> test.py::test[join-yql-14847-off-Results] [SKIPPED] >> test.py::test[join-yql-8980--Results] >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-ForceBlocks] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[aggr_factory-stddev-default.txt-Results] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[window-full/session_aliases--ForceBlocks] >> test.py::test[blocks-sort_two_asc--Results] [GOOD] >> test.py::test[blocks-string_len_and_cmp--Results] >> test.py::test[aggregate-group_by_gs_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] >> test.py::test[key_filter-range_union--Results] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-Results] >> 
test.py::test[schema-select_all_inferschema2--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema2--Results] >> test.py::test[blocks-add_decimal--Results] [GOOD] >> test.py::test[blocks-block_input-aux_columns-Results] [SKIPPED] >> test.py::test[blocks-coalesce_bools--Results] >> test.py::test[schema-user_schema_append--Results] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] >> test.py::test[join-left_semi_with_other-off-ForceBlocks] [GOOD] >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-ForceBlocks] >> test.py::test[ypath-multi_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-multi_range-default.txt-Results] >> test.py::test[pg-insert--Results] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[blocks-sort_two_mix--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] >> test.py::test[json-json_exists/example--ForceBlocks] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-ForceBlocks] >> test.py::test[join-premap_common_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner-off-Results] >> test.py::test[json-json_exists/example--Results] >> test.py::test[action-eval_each_input_table-default.txt-ForceBlocks] >> test.py::test[insert-after_group_by-default.txt-Results] [GOOD] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[join-premap_common_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_context_dep--ForceBlocks] >> test.py::test[ypath-multi_range-default.txt-Results] [GOOD] >> test.py::test[ytflow-select_over_static--ForceBlocks] >> test.py::test[insert-part_sortness--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-ForceBlocks] >> test.py::test[join-star_join-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join-off-Results] >> test.py::test[blocks-combine_all_min_filter_opt--ForceBlocks] >> test.py::test[ytflow-select_over_static--ForceBlocks] [SKIPPED] >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[select-type_assert-default.txt-Results] [GOOD] >> test.py::test[select-where_cast-default.txt-Results] >> test.py::test[join-star_join-off-Results] [SKIPPED] >> test.py::test[join-yql-14847--ForceBlocks] >> test.py::test[schema-select_all_inferschema2--Results] [GOOD] >> test.py::test[schema-select_reordered-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_lambda--Results] [GOOD] >> test.py::test[aggregate-compare_by_tuple--Results] >> test.py::test[aggr_factory-max_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Results] >> 
test.py::test[aggregate-percentiles_ungrouped--Results] [GOOD] >> test.py::test[aggregate-rollup_with_dict--Results] >> test.py::test[weak_field-weak_field_join_where--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[blocks-sort_two_mix--Results] [GOOD] >> test.py::test[column_group-hint-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint-disable-Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] >> test.py::test[join-grace_join1-off-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial--ForceBlocks] >> test.py::test[column_group-hint-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--Results] [SKIPPED] >> test.py::test[column_group-many_inserts--ForceBlocks] [SKIPPED] >> test.py::test[column_group-many_inserts--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-ForceBlocks] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[ytflow-select_over_static--Results] [SKIPPED] >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-ForceBlocks] >> test.py::test[join-pullup_context_dep--Results] [GOOD] >> test.py::test[join-pullup_left_semi--Results] >> test.py::test[join-yql-8125--ForceBlocks] [GOOD] >> test.py::test[join-yql-8125--Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--ForceBlocks] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--Results] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_override--Results] >> test.py::test[select-table_content_with_tmp_folder--ForceBlocks] [GOOD] >> test.py::test[select-table_content_with_tmp_folder--Results] >> test.py::test[limit-insert_with_limit-dynamic-Results] [GOOD] >> test.py::test[limit-limit_offset-default.txt-Results] >> test.py::test[join-join_cbo_3_tables--ForceBlocks] [GOOD] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[pg-nulls_native-default.txt-Results] [GOOD] >> test.py::test[pg-point-default.txt-Results] >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-lagging/aggregations--Results] >> test.py::test[json-json_exists/example--Results] [GOOD] >> test.py::test[key_filter-is_null--ForceBlocks] >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[blocks-coalesce_bools--Results] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] >> 
test.py::test[window-leading/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-leading/aggregations--Results] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] >> test.py::test[flatten_by-flatten_one_field--Results] [GOOD] >> test.py::test[hor_join-max_in_tables--Results] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--ForceBlocks] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[select-where_cast-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-Results] >> test.py::test[select-exists_true-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_aggr_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint32--ForceBlocks] >> test.py::test[blocks-string_len_and_cmp--Results] [GOOD] >> test.py::test[case-case_multi_val-default.txt-Results] >> test.py::test[insert-part_sortness--ForceBlocks] [GOOD] >> test.py::test[insert-part_sortness--Results] >> test.py::test[join-premap_context_dep--ForceBlocks] [GOOD] >> test.py::test[join-premap_context_dep--Results] >> test.py::test[blocks-combine_all_min_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] >> test.py::test[schema-select_reordered-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o--ForceBlocks] >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] >> test.py::test[schema-select_reordered-default.txt-Results] >> test.py::test[action-eval_each_input_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] >> test.py::test[pg-nulls-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns-default.txt-Results] >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[join-inner_trivial--ForceBlocks] [GOOD] >> test.py::test[join-yql-14847--ForceBlocks] [GOOD] >> 
test.py::test[join-yql-14847--Results] >> test.py::test[join-inner_trivial--Results] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[select-where_cast-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> test.py::test[order_by-order_by_expr_mul_cols--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] >> test.py::test[insert-part_sortness--Results] [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-ForceBlocks] >> test.py::test[window-full/session_aliases_compact--Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--Results] >> test.py::test[window-full/session_aliases--ForceBlocks] [GOOD] >> test.py::test[window-full/session_aliases--Results] >> test.py::test[schema-select_reordered-default.txt-Results] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-ForceBlocks] >> test.py::test[aggregate-table_row_aggregation-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-decimal_avg--ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] >> test.py::test[join-pullup_left_semi--Results] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-ForceBlocks] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[column_order-winfunc-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-winfunc-default.txt-Results] >> test.py::test[pg-point-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-Results] >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[join-lookupjoin_take_skip--ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_take_skip--Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-ForceBlocks] >> test.py::test[limit-limit_offset-default.txt-Results] [GOOD] >> test.py::test[limit-yql-8611_calc_peephole--Results] >> test.py::test[aggregate-rollup_with_dict--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] >> test.py::test[join-yql-8980--Results] [GOOD] >> test.py::test[join-yql_465--Results] >> test.py::test[join-premap_context_dep--Results] [GOOD] >> test.py::test[join-premap_context_dep-off-ForceBlocks] >> test.py::test[key_filter-datetime-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-datetime-default.txt-Results] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[schema-select_reordered-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min-default.txt-Results] [GOOD] >> 
test.py::test[aggr_factory-some-default.txt-Results] >> test.py::test[window-leading/aggregations--Results] [GOOD] >> test.py::test[window-rank/opt--ForceBlocks] >> test.py::test[key_filter-is_null--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null--Results] >> test.py::test[join-inner_trivial--Results] [GOOD] >> test.py::test[join-inner_trivial-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-ForceBlocks] >> test.py::test[select-exists_true-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--ForceBlocks] >> test.py::test[join-yql-14847--Results] [GOOD] >> test.py::test[join-yql-8131--ForceBlocks] [SKIPPED] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_expr_mul_cols--Results] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-ForceBlocks] >> test.py::test[join-inner_all_right--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-ForceBlocks] >> test.py::test[select-trivial_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_multi_val-default.txt-Results] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Results] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-ForceBlocks] >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] >> test.py::test[aggr_factory-sum_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-Results] >> test.py::test[aggr_factory-top-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] >> test.py::test[blocks-add_uint32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint32--Results] >> test.py::test[pg-select_from_columns-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-Results] >> test.py::test[hor_join-max_in_tables--Results] [GOOD] >> test.py::test[hor_join-merge_multiouts_all--Results] [SKIPPED] >> test.py::test[hor_join-sorted_out--Results] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] >> test.py::test[like-like_clause_no_pattern-default.txt-ForceBlocks] >> test.py::test[column_order-winfunc-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_and_join--ForceBlocks] >> test.py::test[aggregate-avg_interval-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Results] >> test.py::test[select-exists_true-default.txt-Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_inner_2o--ForceBlocks] [GOOD] >> 
test.py::test[join-lookupjoin_inner_2o--Results] >> test.py::test[aggregate-group_by_gs_flatten-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--ForceBlocks] >> test.py::test[window-distinct_over_window_full_frames--Results] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] >> test.py::test[blocks-add_uint32--Results] [GOOD] >> test.py::test[blocks-add_uint8--ForceBlocks] >> test.py::test[pg-select_from_columns_star-default.txt-Results] [GOOD] >> test.py::test[pg-select_where-default.txt-Results] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-one-ForceBlocks] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Results] >> test.py::test[join-yql_465--Results] [GOOD] >> test.py::test[key_filter-decimal--Results] >> test.py::test[insert-replace_ordered_by_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] >> test.py::test[aggr_factory-sum_if-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByOneField--ForceBlocks] >> test.py::test[join-premap_context_dep-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_context_dep-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross-off-ForceBlocks] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_avg--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_avg--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] |97.6%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part8/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] >> test.py::test[join-inner_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_dup_key-off-ForceBlocks] >> test.py::test[join-join_without_correlation_names--ForceBlocks] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_val_then_else-default.txt-Results] [GOOD] >> test.py::test[case-case_when_then-default.txt-Results] >> test.py::test[join-lookupjoin_inner_2o--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-ForceBlocks] >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> test.py::test[pg-tpcds-q21-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-Results] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_sorted_with_key_diff--ForceBlocks] >> test.py::test[join-inner_all_right--ForceBlocks] [GOOD] >> test.py::test[join-inner_all_right--Results] >> test.py::test[window-current/ansi_current_with_win--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-mixed/aggregations--ForceBlocks] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] [GOOD] >> test.py::test[insert-select_subquery--ForceBlocks] >> test.py::test[pg-tpcds-q07-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-Results] >> test.py::test[aggregate-compare_by_tuple--Results] [GOOD] >> test.py::test[aggregate-dedup_state_keys--Results] >> test.py::test[in-in_with_list_dict-default.txt-ForceBlocks] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-ForceBlocks] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_random-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key--Results] >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[aggr_factory-avg-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-Results] |97.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[join-pullup_extend--Results] >> test.py::test[like-like_clause_no_pattern-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] >> test.py::test[distinct-distinct_and_join--ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_and_join--Results] 
|97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> test.py::test[blocks-json_document_type--ForceBlocks] >> test.py::test[select-literal_bool-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> test.py::test[hor_join-sorted_out--Results] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-Results] >> test.py::test[pg-tpcds-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q18-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_session_extended_subset--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--Results] >> test.py::test[blocks-add_uint8--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint8--Results] >> test.py::test[aggr_factory-some-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-stddev-default.txt-Results] >> test.py::test[pg-select_where-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-Results] >> test.py::test[window-rank/opt--ForceBlocks] [GOOD] >> test.py::test[window-rank/opt--Results] >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-ForceBlocks] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--ForceBlocks] >> test.py::test[select-trivial_where-one-ForceBlocks] [GOOD] >> test.py::test[select-trivial_where-one-Results] >> test.py::test[blocks-combine_all_count_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_max_filter--Results] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] [GOOD] >> test.py::test[limit-limit_offset-default.txt-ForceBlocks] >> test.py::test[aggregate-GroupByOneField--ForceBlocks] [GOOD] >> test.py::test[aggregate-GroupByOneField--Results] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-coalesce_bools--ForceBlocks] >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--ForceBlocks] >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Results] >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--ForceBlocks] >> test.py::test[window-generic/aggregations_before_current--Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] |97.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part8/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[case-case_when_then-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-max-Results] [SKIPPED] >> test.py::test[column_group-groups-perusage-Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups2_fail--Results] [SKIPPED] >> test.py::test[column_order-align_publish--Results] >> test.py::test[limit-yql-8611_calc_peephole--Results] [GOOD] >> test.py::test[lineage-group_by_asstruct_key-default.txt-Results] [SKIPPED] >> test.py::test[lineage-list_literal2-default.txt-Results] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt-Results] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> test.py::test[join-pullup_cross-off-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_simple--Results] >> test.py::test[join-pullup_cross-off-Results] [SKIPPED] >> test.py::test[join-pullup_left_semi-off-ForceBlocks] >> test.py::test[distinct-distinct_and_join--Results] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-ForceBlocks] >> test.py::test[lineage-select_all_filter-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset--Results] >> test.py::test[select-trivial_where-one-Results] [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] >> test.py::test[join-mapjoin_dup_key-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] >> test.py::test[table_range-concat_sorted_with_key_diff--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] >> test.py::test[join-lookupjoin_inner_empty_subq-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side-off-ForceBlocks] >> test.py::test[aggregate-group_by_session_extended_subset--Results] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--ForceBlocks] >> test.py::test[join-join_without_correlation_names--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--ForceBlocks] >> test.py::test[in-in_with_list_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_list_dict-default.txt-Results] >> test.py::test[join-join_without_correlation_names--Results] >> test.py::test[insert-select_subquery--ForceBlocks] [GOOD] >> test.py::test[insert-select_subquery--Results] >> test.py::test[key_filter-datetime-default.txt-Results] [GOOD] >> test.py::test[lineage-list_literal2-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal2-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_session-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] >> test.py::test[pg-tpcds-q23-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt-Results] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] 
[GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[aggregate-GroupByOneField--Results] [GOOD] >> test.py::test[aggregate-dedup_state_keys--ForceBlocks] >> test.py::test[key_filter-decimal--Results] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Results] >> test.py::test[join-pullup_extend--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--ForceBlocks] >> test.py::test[blocks-json_document_type--ForceBlocks] [GOOD] >> test.py::test[blocks-json_document_type--Results] >> test.py::test[pg-tpcds-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q68-default.txt-Results] >> test.py::test[aggregate-dedup_state_keys--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] >> test.py::test[in-in_types_cast_all-default.txt-Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-Results] >> test.py::test[pg-tpcds-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q18-default.txt-Results] >> test.py::test[pg-drop_table--ForceBlocks] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] >> test_ydb_backup.py::TestDatabaseBackup::test_database_backup [GOOD] >> test.py::test[in-in_with_list_dict-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> test.py::test[insert-drop_sortness-calc-ForceBlocks] >> test.py::test[insert-select_subquery--Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-ForceBlocks] >> test.py::test[window-rank/opt--Results] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-ForceBlocks] >> test.py::test[select-sampleselect--ForceBlocks] [GOOD] >> test.py::test[select-sampleselect--Results] >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off-Results] [SKIPPED] >> test.py::test[join-left_only_with_other--ForceBlocks] >> test.py::test[join-join_without_correlation_names--Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] >> test.py::test[join-left_join_right_pushdown_no_opt--ForceBlocks] >> test.py::test[blocks-combine_all_max_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_some--Results] >> test.py::test[limit-limit_offset-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-limit_offset-default.txt-Results] >> test.py::test[window-mixed/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-mixed/aggregations--Results] >> test.py::test[blocks-coalesce_bools--ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_bools--Results] >> test.py::test[pg-tpcds-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-ForceBlocks] >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[window-distinct_over_window_full_frames--Results] [GOOD] >> test.py::test[window-empty/aggregations--ForceBlocks] >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[blocks-pg_to_strings--ForceBlocks] >> test.py::test[action-action_eval_cluster_table_for--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-ForceBlocks] 
[GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--Results] [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] [GOOD] >> test.py::test[join-pullup_left_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_left_semi-off-Results] [SKIPPED] >> test.py::test[distinct-distinct_by_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] >> test.py::test[join-opt_on_opt_side-off-ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber-off-ForceBlocks] >> test.py::test[select-simple_struct_field_access--ForceBlocks] >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q40-default.txt-Results] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_simple--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-ForceBlocks] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[column_order-align_publish--Results] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset--Results] [GOOD] >> test.py::test[column_order-ordered_plus_native--Results] >> test.py::test[optimizers-fuse_map_mapreduce--Results] [SKIPPED] >> test.py::test[optimizers-length_over_merge--Results] >> test.py::test[pg-tpcds-q68-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] >> test.py::test[aggregate-group_by_mul_gb_ru--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] >> test.py::test[limit-limit_offset-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda--ForceBlocks] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[select-unlabeled--ForceBlocks] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda--ForceBlocks] [SKIPPED] >> test.py::test[lineage-flatten_list_nested_lambda--Results] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-percentile_and_avg_grouped--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] >> test.py::test[blocks-coalesce_bools--Results] [GOOD] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] >> test.py::test[dq-precompute_parallel_mix--Results] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[join-pullup_left_semi-off-Results] [SKIPPED] >> test.py::test[aggregate-dedup_state_keys--ForceBlocks] [GOOD] >> test.py::test[aggregate-dedup_state_keys--Results] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> 
test.py::test[dq-wrong_script_segf--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[epochs-write_and_use_in_same_epoch--ForceBlocks] >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted--ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Results] >> test.py::test[in-in_with_tuple-default.txt-Results] [GOOD] >> test.py::test[in-yql-14677-default.txt-Results] |97.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[key_filter-dict_contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[pg-drop_table--ForceBlocks] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-on-Results] [SKIPPED] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] >> test.py::test[insert-drop_sortness-calc-ForceBlocks] [GOOD] >> test.py::test[insert-drop_sortness-calc-Results] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> test.py::test[tpch-q8-default.txt-Results] >> test.py::test[aggr_factory-stddev-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_by_tuple-default.txt-Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] >> test.py::test[dq-blacklisted_pragmas1--ForceBlocks] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] |97.7%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part7/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[flatten_by-flatten_with_join--Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] >> test.py::test[pg-tpcds-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-Results] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test.py::test[join-star_join_inners_premap--ForceBlocks] >> test.py::test[aggregate-dedup_state_keys--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--ForceBlocks] >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] >> test.py::test[insert-replace_inferred_op--ForceBlocks] >> test.py::test[window-mixed/aggregations--Results] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--ForceBlocks] >> test.py::test[window-leading/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-win_func_rank_by_part--Results] >> test.py::test[join-left_only_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_only_with_other--Results] >> test.py::test[select-simple_struct_field_access--ForceBlocks] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] [GOOD] >> test.py::test[select-simple_struct_field_access--Results] >> test.py::test[aggregate-percentiles_grouped_expr--ForceBlocks] >> test.py::test[pg-tpcds-q21-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q47-default.txt-ForceBlocks] >> test.py::test[action-action_eval_cluster_table_for--ForceBlocks] [GOOD] >> test.py::test[action-action_eval_cluster_table_for--Results] >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q40-default.txt-ForceBlocks] >> test.py::test[optimizers-length_over_merge--Results] [GOOD] >> test.py::test[optimizers-multi_to_empty_constraint--Results] >> test.py::test[window-win_by_all_percentile_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] >> test.py::test[action-eval_atom_wrong_type_param--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[action-evaluate_match_type-default.txt-ForceBlocks] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--ForceBlocks] >> test.py::test[blocks-combine_all_some--Results] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_hashed_min--Results] >> test.py::test[pg-tpcds-q80-default.txt-Results] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[window-empty/aggregations--ForceBlocks] 
[GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[select-simple_struct_field_access--Results] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_binding--Results] >> test.py::test[select-sum_to_string-default.txt-ForceBlocks] >> test.py::test[column_order-ordered_plus_native--Results] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] >> test.py::test[blocks-pg_to_strings--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_strings--Results] >> test.py::test[join-pullup_rownumber-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_rownumber-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off-ForceBlocks] >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--Results] >> test.py::test[blocks-combine_all_minmax_double--Results] >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gb_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--ForceBlocks] >> test.py::test[aggregate-agg_phases_table2-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--ForceBlocks] >> test.py::test[action-action_eval_cluster_table_for--Results] [GOOD] >> test.py::test[action-action_nested_query-default.txt-ForceBlocks] >> test.py::test[key_filter-dict_contains_optional--Results] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Results] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo_opt--ForceBlocks] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--Results] [SKIPPED] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] >> test.py::test[hor_join-group_ranges--ForceBlocks] >> test.py::test[union_all-union_all_trivial-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] >> test.py::test[sampling-bind_expr_subquery-default.txt-ForceBlocks] >> test.py::test[join-left_only_with_other--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--ForceBlocks] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_stat--ForceBlocks] >> test.py::test[blocks-pg_to_strings--Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--ForceBlocks] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] [GOOD] >> test.py::test[in-huge_in-default.txt-Results] >> test.py::test[optimizers-multi_to_empty_constraint--Results] [GOOD] >> test.py::test[pg-pg_column_case--Results] >> test.py::test[pg-tpcds-q40-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q40-default.txt-Results] |97.8%| [TA] {RESULT} 
$(B)/ydb/library/yql/tests/sql/dq_file/part7/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--Results] >> test.py::test[bigdate-table_arithmetic-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-ForceBlocks] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[pg-tpcds-q80-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-Results] >> test.py::test[insert-replace_inferred_op--ForceBlocks] [GOOD] >> test.py::test[insert-replace_inferred_op--Results] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-ForceBlocks] >> test.py::test[action-eval_sample-] [SKIPPED] >> test.py::test[action-export_action-] [SKIPPED] >> test.py::test[action-insert_each_from_folder-] [SKIPPED] >> test.py::test[aggregate-agg_filter_pushdown-] >> test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag--ForceBlocks] >> test.py::test[join-star_join_inners_premap--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap--Results] >> test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-ForceBlocks] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[optimizers-multi_to_empty_constraint--Results] [GOOD] >> test.py::test[epochs-write_and_use_in_same_epoch--ForceBlocks] [GOOD] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] >> test.py::test[action-evaluate_match_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-evaluate_match_type-default.txt-Results] >> test.py::test[binding-table_regexp_binding--Results] [GOOD] >> test.py::test[blocks-block_input--Results] [SKIPPED] >> test.py::test[blocks-combine_all_pg--Results] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test.py::test[weak_field-weak_field_esc_yson--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] >> test.py::test[aggregate-aggrs_no_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping--Results] >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-ForceBlocks] >> test.py::test[column_order-union_all_positional_columns_count_fail--ForceBlocks] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--ForceBlocks] >> test.py::test[select-sum_to_string-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-sum_to_string-default.txt-Results] >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> 
test.py::test[join-selfjoin_on_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter--ForceBlocks] >> test.py::test[insert-replace_inferred_op--Results] [GOOD] >> test.py::test[join-count_bans--ForceBlocks] >> test.py::test[aggregate-percentiles_grouped_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--Results] >> test.py::test[tpch-q8-default.txt-Results] [GOOD] >> test.py::test[tpch-q9-default.txt-Results] >> test.py::test[aggregate-group_by_cube_duo--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--Results] >> test.py::test[blocks-combine_hashed_min--Results] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[key_filter-no_bypass_merge--Results] [SKIPPED] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] >> test.py::test[pg-tpcds-q47-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_limit_offset_reorder-default.txt-Results] [GOOD] >> test.py::test[column_order-select_plain-default.txt-Results] >> test.py::test[action-action_nested_query-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_nested_query-default.txt-Results] >> test.py::test[action-table_content_before_from_folder-] >> test.py::test[window-win_func_rank_by_part--Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--Results] >> test.py::test[window-win_func_order_by_udf_empty_rank--ForceBlocks] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] >> test.py::test[action-table_content_before_from_folder-] [SKIPPED] >> test.py::test[aggr_factory-bitand-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_expr_mul_col-] >> test.py::test[action-evaluate_match_type-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-ForceBlocks] >> test.py::test[hor_join-group_ranges--ForceBlocks] [GOOD] >> test.py::test[hor_join-group_ranges--Results] >> test.py::test[select-sum_to_string-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-ForceBlocks] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_trivial-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Results] >> test.py::test[aggregate-group_by_cube_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[pg-tpcds-q47-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_nested_query-default.txt-Results] [GOOD] >> test.py::test[action-insert_each_from_folder--ForceBlocks] >> test.py::test[pg-tpcds-q87-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-Results] >> test.py::test[aggregate-agg_filter_pushdown-] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping-] >> test.py::test[join-star_join_inners_premap--Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-ForceBlocks] >> 
test.py::test[join-lookupjoin_with_cache-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_dup_key--ForceBlocks] >> test.py::test[aggr_factory-boolor-default.txt] [SKIPPED] >> test.py::test[aggregate-GroupByOneField-] >> test.py::test[aggregate-group_by_rollup_duo_opt--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] >> test.py::test[pg-select_unionall_self-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--ForceBlocks] [GOOD] >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_star--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] [GOOD] >> test.py::test[window-win_with_cur_row--ForceBlocks] >> test.py::test[hor_join-group_ranges--Results] [GOOD] >> test.py::test[hor_join-skip_yamr--ForceBlocks] >> test.py::test[pg-tpcds-q79-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] >> test.py::test[pg-select_unionall_self-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-ForceBlocks] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] |97.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_stat--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_stat--Results] >> test.py::test[count-count_distinct_from_view_concat--ForceBlocks] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--Results] >> test.py::test[aggregate-percentiles_grouped_expr--Results] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-ForceBlocks] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] [GOOD] >> test.py::test[sampling-sample-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_cube_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite_star--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--ForceBlocks] >> test.py::test[pg-tpcds-q79-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-ForceBlocks] >> test.py::test[aggregate-GroupByOneField-] [GOOD] >> 
test.py::test[aggregate-aggregate_distinct_struct_access-default.txt] >> test.py::test[in-huge_in-default.txt-Results] [GOOD] >> test.py::test[insert-append_after_replace-default.txt-Results] >> test.py::test[aggregate-group_by_expr_mul_col-] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join-] >> test.py::test[join-selfjoin_on_sorted_with_filter--ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--ForceBlocks] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--ForceBlocks] >> test.py::test[pg-tpcds-q91-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q96-default.txt-Results] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] >> test.py::test[aggregate-aggrs_no_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-ForceBlocks] >> test.py::test[action-subquery_merge_nested_world-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--ForceBlocks] >> test.py::test[aggregate-group_by_expr_semi_join--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty--Results] >> test.py::test[join-count_bans--ForceBlocks] [GOOD] >> test.py::test[join-count_bans--Results] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt--Results] >> test.py::test[join-mapjoin_dup_key--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping-] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio-] >> test.py::test[window-full/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_leadlag--Results] >> test.py::test[window-win_func_aggr_stat--Results] [GOOD] >> test.py::test[hor_join-skip_yamr--ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain-default.txt-Results] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--Results] >> test.py::test[union_all-union_all_with_limits-default.txt-ForceBlocks] [GOOD] >> tool::import_test [GOOD] >> test.py::test[aggregate-group_by_expr_with_join-] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum-] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] >> test.py::test[action-insert_each_from_folder--ForceBlocks] [GOOD] >> test.py::test[action-insert_each_from_folder--Results] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] |97.8%| [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-ForceBlocks] >> test.py::test[join-star_join_inners_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] |97.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/import_test >> tool::import_test [GOOD] |97.9%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/test-results/pytest/{meta.json ... results_accumulator.log} |97.9%| [TS] {RESULT} ydb/tests/stability/tool/import_test >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] [SKIPPED] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-ForceBlocks] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[select-uncorrelated_subqueries--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] |97.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[hor_join-skip_yamr--ForceBlocks] [GOOD] |97.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[window-win_func_aggr_stat--Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key-] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_gs-] >> test.py::test[pg-tpcds-q96-default.txt-Results] [GOOD] >> test.py::test[pg-wide_sort--Results] >> test.py::test[pg-tpcds-q10-default.txt-Results] >> test.py::test[window-win_with_cur_row--ForceBlocks] [GOOD] >> test.py::test[sampling-sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-sample-default.txt-Results] >> test.py::test[window-win_with_cur_row--Results] >> test.py::test[insert-append_after_replace-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted--Results] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--Results] [SKIPPED] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 10:01:26] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 10:01:27] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 10:01:28] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 10:01:30] "GET /nested_library.sql.txt HTTP/1.1" 200 - >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] >> 
test.py::test[window-distinct_over_window_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] |97.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[action-insert_each_from_folder--Results] [GOOD] >> test.py::test[action-subquery-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_cube_expr_trio-] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count-] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-standalone_view_lambda--ForceBlocks] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] >> test.py::test[join-mapjoin_with_empty_struct--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--Results] >> test_ydb_backup.py::TestClusterBackupRestore::test_cluster_backup_restore >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> test.py::test[aggregate-avg_and_sum_by_value--ForceBlocks] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--Results] >> test.py::test[bigdate-implicit_cast_callable-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] >> test.py::test[type_v3-decimal_yt--Results] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] >> test.py::test[join-count_bans--Results] [GOOD] >> test.py::test[join-grace_join2--ForceBlocks] >> test.py::test[pg-tpcds-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-ForceBlocks] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> test.py::test[blocks-combine_all_pg--Results] [GOOD] >> test.py::test[blocks-combine_all_some_filter--Results] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> test.py::test[sampling-sample-default.txt-Results] [GOOD] >> test.py::test[sampling-sort-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping_hum-] [GOOD] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals_scalar--Results] >> test.py::test[aggregate-group_by_mul_gs_gs-] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order-] [SKIPPED] >> test.py::test[ansi_idents-join_using-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 10:01:02] "GET /mylib.sql HTTP/1.1" 200 - >> test.py::test[aggregate-group_by_rollup_with_filter-] >> test.py::test[join-mapjoin_with_empty_struct--Results] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort_cross--ForceBlocks] >> test.py::test[aggregate-group_by_session_compact--ForceBlocks] 
[GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] >> test.py::test[window-win_with_cur_row--Results] [GOOD] >> test.py::test[ypath-empty_range--ForceBlocks] [SKIPPED] >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] >> test.py::test[ypath-empty_range--Results] [SKIPPED] >> test.py::test[datetime-date_tz_table_sort_desc--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Results] >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--Results] >> ReadUpdateWrite::Load >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bottom-default.txt] [SKIPPED] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt] >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> test.py::test[action-action_eval_cluster_table-] >> TopicSessionTests::TwoSessionWithoutPredicate >> test.py::test[action-action_eval_cluster_table-] [SKIPPED] >> test.py::test[action-action_nested_query-default.txt] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-win_extract_members-default.txt-ForceBlocks] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] >> KqpQueryService::ReplyPartLimitProxyNode >> test.py::test[aggregate-group_by_cube_join_count-] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten-default.txt] >> test.py::test[aggregate-avg_and_sum_by_value--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|97.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[ypath-empty_range--Results] [SKIPPED] |97.9%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test |98.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-uncorrelated_subqueries--ForceBlocks] [GOOD] >> test.py::test[select-uncorrelated_subqueries--Results] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] [GOOD] >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> test.py::test[pg-wide_sort--Results] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-ForceBlocks] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail--Results] [SKIPPED] >> test.py::test[produce-fuse_reduces_diff_sets--Results] >> test.py::test[window-win_multiaggr_list-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_with_filter-] [GOOD] >> test.py::test[aggregate-having_distinct_expr-] |98.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test.py::test[insert-append_sorted--Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] |98.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[aggregate-avg_and_sum_by_value--Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] [GOOD] >> test.py::test[type_v3-split--Results] [SKIPPED] >> test.py::test[window-full/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-generic/session--ForceBlocks] >> test.py::test[aggr_factory-count_if-default.txt-Results] >> test.py::test[datetime-date_tz_table_sort_desc--Results] [GOOD] >> test.py::test[dq-truncate_local-default.txt-ForceBlocks] >> test.py::test[type_v3-uuid--Results] >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--ForceBlocks] >> test.py::test[view-standalone_view_lambda--ForceBlocks] [GOOD] >> test.py::test[view-standalone_view_lambda--Results] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt] >> test.py::test[join-grace_join2--ForceBlocks] [GOOD] >> test.py::test[join-grace_join2--Results] [SKIPPED] >> test.py::test[join-inner_on_key_only-off-ForceBlocks] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test.py::test[action-subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] 
[GOOD] >> test.py::test[binding-table_regexp_strict_binding--ForceBlocks] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-limit--ForceBlocks] >> test.py::test[pg-tpcds-q29-default.txt-ForceBlocks] [GOOD] |98.0%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[ansi_idents-join_using-default.txt] [GOOD] >> test.py::test[bigdate-table_common_type-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] >> test.py::test[blocks-combine_all_some_filter--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Results] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--ForceBlocks] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolve >> test.py::test[blocks-combine_hashed_sum_many_keys--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar--Results] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> test_ctas.py::TestYtCtas::test_simple_ctast >> test.py::test[action-action_nested_query-default.txt] [GOOD] |98.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_param-] >> test.py::test[aggregate-group_by_gs_flatten-default.txt] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names-] >> test.py::test[select-uncorrelated_subqueries--Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_param-] [SKIPPED] >> test.py::test[action-eval_like-] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail-] [SKIPPED] >> test.py::test[action-eval_unresolved_type_arg-default.txt] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:136:2058] recipient: [1:110:2142] 2025-05-05T09:57:51.004217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-05-05T09:57:51.004259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:51.004265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-05-05T09:57:51.004270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-05-05T09:57:51.004276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-05-05T09:57:51.004280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-05-05T09:57:51.004290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-05-05T09:57:51.004305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-05-05T09:57:51.004407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-05-05T09:57:51.004488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-05-05T09:57:51.017593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-05-05T09:57:51.017620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-05-05T09:57:51.017730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ClickHouse, PostgreSQL, MySQL, Ydb, YT, Greenplum, MsSQLServer, Oracle, Logging, Solomon Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:191:2058] recipient: [1:15:2062] 2025-05-05T09:57:51.020903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-05-05T09:57:51.020949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-05-05T09:57:51.020983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-05-05T09:57:51.022100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-05-05T09:57:51.022151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-05-05T09:57:51.022281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:51.022327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-05-05T09:57:51.022875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:51.023200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-05-05T09:57:51.023212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:51.023270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-05-05T09:57:51.023278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:51.023285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-05-05T09:57:51.023314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-05-05T09:57:51.025123Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:133:2156] sender: [1:240:2058] recipient: [1:15:2062] 2025-05-05T09:57:51.048550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-05-05T09:57:51.048624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.048680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-05-05T09:57:51.048744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-05-05T09:57:51.048755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.049541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:51.049581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-05-05T09:57:51.049640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.049650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-05-05T09:57:51.049655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-05-05T09:57:51.049661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-05-05T09:57:51.050129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.050142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-05-05T09:57:51.050147Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-05-05T09:57:51.050588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.050600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.050606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:51.050613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-05-05T09:57:51.051329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-05-05T09:57:51.051763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-05-05T09:57:51.051792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-05-05T09:57:51.051960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T09:57:51.051981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T09:57:51.051988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:51.052043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-05-05T09:57:51.052050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-05-05T09:57:51.052080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-05-05T09:57:51.052091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-05-05T09:57:51.052480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T09:57:51.052487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T09:57:51.052517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T09:57:51.052522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-05-05T09:57:51.052531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-05-05T09:57:51.052536Z node 1 :FLAT_TX_SCHEMESHARD I ... T_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-05-05T10:02:14.915661Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T10:02:14.915831Z node 445 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-05-05T10:02:14.915970Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T10:02:14.915999Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: false 2025-05-05T10:02:14.916006Z node 445 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-05-05T10:02:14.916097Z node 445 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.916114Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.916119Z node 445 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T10:02:14.916124Z node 445 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-05-05T10:02:14.916131Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T10:02:14.916150Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-05-05T10:02:14.916715Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-05-05T10:02:14.916748Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000009 2025-05-05T10:02:14.916842Z node 445 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-05-05T10:02:14.916878Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 122 RawX2: 1911260448868 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-05-05T10:02:14.916887Z node 445 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 
5000009, at schemeshard: 72057594046678944 2025-05-05T10:02:14.916918Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-05-05T10:02:14.916929Z node 445 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T10:02:14.916933Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T10:02:14.916939Z node 445 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-05-05T10:02:14.916942Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T10:02:14.916954Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T10:02:14.916964Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T10:02:14.916969Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-05-05T10:02:14.916976Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-05-05T10:02:14.916992Z node 445 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-05-05T10:02:14.916996Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-05-05T10:02:14.917008Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-05-05T10:02:14.917017Z node 445 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-05-05T10:02:14.917021Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-05-05T10:02:14.917025Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-05-05T10:02:14.917663Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.917710Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.918251Z node 445 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-05-05T10:02:14.918268Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-05-05T10:02:14.918315Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-05-05T10:02:14.918343Z node 445 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-05-05T10:02:14.918348Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [445:208:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-05-05T10:02:14.918354Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [445:208:2210], at 
schemeshard: 72057594046678944, txId: 281474976710761, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-05-05T10:02:14.918531Z node 445 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.918544Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.918549Z node 445 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T10:02:14.918554Z node 445 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-05-05T10:02:14.918560Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-05-05T10:02:14.918714Z node 445 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.918728Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.918732Z node 445 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-05-05T10:02:14.918736Z node 445 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-05-05T10:02:14.918740Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-05-05T10:02:14.918755Z node 445 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-05-05T10:02:14.918761Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [445:130:2154] 2025-05-05T10:02:14.918818Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-05-05T10:02:14.918823Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-05-05T10:02:14.918835Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-05-05T10:02:14.919357Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.919702Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-05-05T10:02:14.919726Z node 445 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths 
Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-05-05T10:02:14.919737Z node 445 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-05-05T10:02:14.919748Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-05-05T10:02:14.919757Z node 445 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-05-05T10:02:14.919762Z node 445 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-05-05T10:02:14.919766Z node 445 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 1004, itemIdx# 4294967295 2025-05-05T10:02:14.920166Z node 445 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1004 2025-05-05T10:02:14.920226Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-05-05T10:02:14.920233Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-05-05T10:02:14.920305Z node 445 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-05-05T10:02:14.920322Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-05-05T10:02:14.920327Z node 445 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [445:991:2895] TestWaitNotification: OK eventTxId 1004 >> test.py::test[aggregate-group_by_cube_grouping--ForceBlocks] [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Results] >> test_encryption.py::TestEncryption::test_simple_encryption >> test.py::test[sampling-sort-default.txt-ForceBlocks] [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> test.py::test[sampling-sort-default.txt-Results] >> test.py::test[pg-tpcds-q84-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-Results] >> test.py::test[view-standalone_view_lambda--Results] [GOOD] >> test.py::test[view-trivial_view--ForceBlocks] >> test.py::test[blocks-date_equals_scalar--Results] [GOOD] >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] >> test.py::test[aggregate-having_distinct_expr-] [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> test.py::test[aggregate-list_after_group-default.txt] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> test.py::test[aggregate-group_by_gs_few_empty--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolve [GOOD] >> 
test_http_api.py::TestHttpApi::test_warning [GOOD] >> KqpFederatedQuery::ExecuteQueryWithExternalTableResolve >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test.py::test[action-eval_pragma-] >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-ForceBlocks] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> test.py::test[blocks-pg--Results] >> test.py::test[aggregate-aggregate_list_in_key-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> test.py::test[aggregate-ensure_count-default.txt] [GOOD] >> test.py::test[aggregate-group_by_tablerow_column-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> test.py::test[action-eval_unresolved_type_arg-default.txt] [GOOD] >> test.py::test[action-nested_subquery-] [SKIPPED] >> test.py::test[action-select_from_subquery_with_orderby-default.txt] [SKIPPED] >> test.py::test[action-subquery-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bitxor-default.txt] [SKIPPED] >> test.py::test[aggr_factory-sum_if-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_list-default.txt] [SKIPPED] >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-ForceBlocks] >> ServerRestartTest::RestartOnGetSession >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names-] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse-] >> test.py::test[action-subquery-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] >> test.py::test[pg-tpcds-q84-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-ForceBlocks] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-ForceBlocks] >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> KqpFederatedQuery::ExecuteQueryWithExternalTableResolve [GOOD] >> KqpFederatedQuery::ExecuteScriptWithS3ReadNotCached >> test.py::test[bigdate-table_common_type-default.txt] [GOOD] >> test.py::test[sampling-sort-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_yt_key_filter-] >> test.py::test[sampling-subquery_sort-default.txt-ForceBlocks] >> test.py::test[aggregate-list_after_group-default.txt] [GOOD] >> test.py::test[aggregate-list_nullable-] >> test.py::test[bigdate-tz_table_yt_key_filter-] [SKIPPED] >> test.py::test[blocks-combine_all_max_filter_opt-] >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> test.py::test[window-win_multiaggr_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Results] |98.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part1/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[type_v3-uuid--Results] [GOOD] >> test.py::test[udf-udf--Results] >> test.py::test[dq-truncate_local-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-truncate_local-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail--ForceBlocks] >> test.py::test[action-eval_each_input_table-default.txt] [SKIPPED] >> test.py::test[action-eval_if_guard-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column-] >> BulkUpsert::BulkUpsert >> test.py::test[action-eval_pragma-] [GOOD] >> test.py::test[action-eval_range-] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> BasicExample::BasicExample >> test.py::test[action-eval_range-] [SKIPPED] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt] [SKIPPED] >> test.py::test[aggr_factory-hll-default.txt] [SKIPPED] >> test.py::test[aggr_factory-max_by-default.txt] [SKIPPED] >> test.py::test[aggr_factory-top_by-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt] >> test.py::test[window-win_extract_members-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_extract_members-default.txt-Results] >> test.py::test[produce-fuse_reduces_diff_sets--Results] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test.py::test[join-inner_on_key_only-off-ForceBlocks] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> test.py::test[join-inner_on_key_only-off-Results] [SKIPPED] >> test.py::test[insert-append_sorted-to_sorted_desc-Results] [GOOD] >> test.py::test[join-join_and_distinct_key--ForceBlocks] >> test.py::test[insert-append_with_read_udf_fail--Results] >> KqpFederatedQuery::ExecuteScriptWithS3ReadNotCached [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSource >> test.py::test[limit-limit--ForceBlocks] [GOOD] >> test.py::test[limit-limit--Results] >> test.py::test[view-trivial_view--ForceBlocks] [GOOD] >> test.py::test[view-trivial_view--Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt] >> BasicExample::BasicExample [GOOD] >> test.py::test[binding-table_regexp_strict_binding--ForceBlocks] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--Results] >> test.py::test[aggregate-avg_interval-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct-] [SKIPPED] >> test.py::test[aggregate-group_by_with_where-default.txt] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected |98.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[produce-fuse_reduces_diff_sets--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-ForceBlocks] [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> 
test.py::test[aggregate-list_nullable-] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt] >> test_yt_reading.py::TestYtReading::test_block_reading >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> test.py::test[expr-non_persistable_group_by_some_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--ForceBlocks] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> test.py::test[bigdate-table_int_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test.py::test[window-win_multiaggr_list-default.txt-Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--ForceBlocks] >> KqpFederatedQuery::ExecuteScriptWithDataSource [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdb >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> test.py::test[aggregate-group_by_tablerow_column-] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] >> test.py::test[bigdate-table_yt_native-wo_compat] |98.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part9/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[limit-limit--Results] [GOOD] >> test.py::test[lineage-if_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Results] [SKIPPED] >> test.py::test[blocks-combine_all_max_filter_opt-] [GOOD] >> test.py::test[blocks-combine_all_min_filter-] >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat] [SKIPPED] >> test.py::test[blocks-combine_hashed_count_filter-] >> test.py::test[optimizers-combinebykey_fields_subset_range--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_reuse-] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping-] >> test.py::test[view-trivial_view--Results] [GOOD] >> test.py::test[weak_field-weak_field_opt--ForceBlocks] >> test.py::test[insert-append_with_read_udf_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_nonstrict_fail--Results] >> test.py::test[pg-tpcds-q61-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-ForceBlocks] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> test.py::test[aggregate-aggregate_with_deep_aggregated_column-] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg-] >> test.py::test[binding-table_regexp_strict_binding--Results] [GOOD] >> test.py::test[blocks-add_int8--ForceBlocks] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt] >> test.py::test[window-win_extract_members-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-ForceBlocks] >> Backup::UuidValue |98.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-Results] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdb [GOOD] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPragma >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--Results] >> test.py::test[window-generic/session--ForceBlocks] [GOOD] >> test.py::test[window-generic/session--Results] >> test.py::test[udf-udf--Results] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> test.py::test[insert-keepmeta_nonstrict_fail--Results] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Results] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |98.1%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> test.py::test[bigdate-table_arithmetic-default.txt] [GOOD] >> test.py::test[binding-named_node_corr_names-default.txt] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-ForceBlocks] >> test.py::test[aggr_factory-top_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] >> 
test.py::test[aggregate-group_by_with_where-default.txt] [GOOD] >> test.py::test[aggregate-percentile_and_variance-] >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] >> test.py::test[blocks-combine_hashed_count_filter-] [GOOD] >> test.py::test[blocks-date_add_interval-] >> NodeIdDescribe::HasDistribution [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] >> test.py::test[aggregate-group_by_cube_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_delay--ForceBlocks] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> ReadUpdateWrite::Load [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg-] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |98.1%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test >> ConsistentIndexRead::InteractiveTx >> test.py::test[sampling-subquery_sort-default.txt-Results] [GOOD] >> test.py::test[sampling-system_sampling--ForceBlocks] [SKIPPED] >> test.py::test[sampling-system_sampling--Results] [SKIPPED] >> test.py::test[schema-copy-other-ForceBlocks] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> test.py::test[blocks-combine_all_min_filter-] [GOOD] >> test.py::test[blocks-combine_all_pg_filter-] >> test.py::test[aggregate-aggregate_by_one_column-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPragma [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdbCheckPragma >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] [GOOD] >> test.py::test[tpch-q10-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_expr_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [SKIPPED] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> test.py::test[join-join_and_distinct_key--ForceBlocks] [GOOD] >> test.py::test[join-join_and_distinct_key--Results] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] [GOOD] >> test.py::test[blocks-bitcast_block--ForceBlocks] >> TopicSessionTests::TwoSessionsWithOffsets >> test.py::test[binding-named_node_corr_names-default.txt] [GOOD] >> 
test.py::test[blocks-bitcast_scalar-] >> test.py::test[file-parse_file_in_select_as_str--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--Results] >> test.py::test[aggregate-percentile_and_variance-] [GOOD] >> test.py::test[binding-table_concat_strict_binding-default.txt] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt] >> test.py::test[aggregate-group_by_hop_bad_delay--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_delay--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct_compact--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] >> test.py::test[binding-table_range_binding-default.txt] [SKIPPED] >> test.py::test[blocks-bitcast_block-] >> test.py::test[ypath-direct_read_from_dynamic--ForceBlocks] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] >> test.py::test[blocks-date_add_interval-] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar-] >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[weak_field-weak_field_opt--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_opt--Results] >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-Results] >> Backup::UuidValue [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> test.py::test[join-mergejoin_saves_output_sort_nested--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] |98.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[aggregate-group_by_hop_static-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg-] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt] >> test.py::test[blocks-add_int8--ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int8--Results] >> test.py::test[aggregate-aggregate_udf_nested-] [GOOD] >> test.py::test[aggregate-compare_by-] >> test.py::test[optimizers-combinebykey_fields_subset_range--ForceBlocks] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] [GOOD] >> test_workload.py::TestYdbWorkload::test[row] >> test.py::test[aggr_factory-bitand-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> test.py::test[action-dep_world_action_quote-default.txt] |98.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-ForceBlocks] >> KqpFederatedQuery::ExecuteScriptWithDataSourceJoinYdbCheckPragma [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceAndTablePathPrefix >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] >> test.py::test[window-generic/session--Results] [GOOD] >> test.py::test[window-lagging/aggregations--ForceBlocks] |98.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. only write Was written: 0 MiB, Speed: 0 MiB/s Write: 10% 0.021560s 30% 0.021560s 50% 0.021560s 90% 0.021560s 99% 0.021560s Write: 10% 0.023811s 30% 0.023811s 50% 0.023811s 90% 0.023811s 99% 0.023811s Write: 10% 0.025423s 30% 0.025423s 50% 0.025423s 90% 0.025423s 99% 0.025423s Write: 10% 0.026067s 30% 0.026067s 50% 0.026067s 90% 0.026067s 99% 0.026067s Write: 10% 0.040218s 30% 0.040218s 50% 0.040218s 90% 0.040218s 99% 0.040218s Write: 10% 0.040483s 30% 0.040483s 50% 0.040483s 90% 0.040483s 99% 0.040483s Write: 10% 0.040744s 30% 0.040744s 50% 0.040744s 90% 0.040744s 99% 0.040744s Write: 10% 0.037186s 30% 0.037186s 50% 0.037186s 90% 0.037186s 99% 0.037186s Write: 10% 0.034119s 30% 0.034119s 50% 0.034119s 90% 0.034119s 99% Write: 10% 0.039781s 30% 0.034119s 0.039781s 50% 0.039781s 90% 0.039781s 99% 0.039781s Write: 10% 0.036792s 30% 0.036792s 50% 0.036792s 90% 0.036792s 99% 0.036792s Write: 10% 0.037566s 30% 0.037566s 50% 0.037566s 90% 0.037566s 99% 0.037566s Write: 10% 0.041010sWrite: 10% 0.038534s 30% 0.038534s 50% 0.038534s 90% 0.038534s 99% 0.038534s 30% 0.041010s 50% 0.041010s 90% 0.041010s 99% 0.041010sWrite: 10% 0.035882s 30% 0.035882s 50% 0.035882s 90% 0.035882s 99% 0.035882s Write: 10% 0.039497s 30% 0.039497s 50% 0.039497s 90% 0.039497s 99% 0.039497s Write: 10% 0.041614s 30% 0.041614s 50% 0.041614s 90% 0.041614s 99% 0.041614s Write: 10% 0.039352s 30% 0.039352s 50% 0.039352s 90% 0.039352s 99% 0.039352s Write: 10% 0.038007s 30% 0.038007s 50% 0.038007s 90% 0.038007s 99% 0.038007s Write: 10% 0.037573s 30% 0.037573s 50% 0.037573s 90% 0.037573s 99% 0.037573s Write: 10% 0.034302s 30% 0.034302s 50% 0.034302s 90% 0.034302s 99% 0.034302s Write: 10% 0.043694s 30% 0.043694s 50% 0.043694s 90% 0.043694s 99% 0.043694s Write: 10% 0.048474s 30% 0.048474s 50% 0.048474s 90% 0.048474s 99% 0.048474s Write: 10% 0.040904s 30% 0.040904s 50% 0.040904s 90% 0.040904s 99% 0.040904s Write: 10% 0.039470s 30% 0.039470s 50% 0.039470s 90% 0.039470s 99% 0.039470s Write: 10% 0.035601s 30% 0.035601s 50% 0.035601s 90% 0.035601s 99% 0.035601s Write: 10% 0.034223s 30% 0.034223s 50% 0.034223s 90% 0.034223s 99% 0.034223s Write: 10% 0.029911s 30% 0.029911s 50% 0.029911s 90% 0.029911s 99% 0.029911s Write: 10% 0.025740s 30% 0.025740s 50% 0.025740s 90% 0.025740s 99% 0.025740s Write: 10% 0.040922s 30% 0.040922s 50% 0.040922s 90% 0.040922s 99% 0.040922s Write: 10% 0.036796s 30% 0.036796s 50% 0.036796s 90% 0.036796s 99% 0.036796s Write: 10% 0.036223s 30% 0.036223s 50% 0.036223s 90% 0.036223s 99% 0.036223s Write: 10% 0.045022s 30% 0.045022s 50% 0.045022s 90% 0.045022s 99% 0.045022s Write: 10% Write: 10% 0.039673s 30% 0.039673s 50% 0.039673s 90% 0.039673s 99% 0.039673s Write: 10% 0.039622s 30% 0.039622s 50% 0.039622s 90% 0.039622s 99% 0.039622s Write: 10% 0.026961s 30% 0.026961s 50% 0.026961s 90% 0.026961s 99% 0.026961s Write: 10% 0.038549s 30% 0.038549s 50% 0.038549s 90% 0.038549s 99% 0.038549s Write: 10% 0.033643s 30% 0.033643s 50% 0.033643s 90% 0.033643s 99% 0.033643s Write: 10% 0.027989s 30% 0.027989s 50% 0.027989s 90% 0.027989s 99% 0.027989s Write: 10% Write: 10% 0.013772s0.012771s 30% 0.012771s 50% 0.012771s 90% 0.012771s 99% 0.012771s 30% 0.013772s 50% 0.013772s 90% 0.013772s 99% 0.013772s Write: 10% 0.030545s 30% 0.030545s 50% 0.030545s 90% 0.030545s 99% 
0.030545s Write: 10% 0.015254s 30% 0.015254s 50% 0.015254s 90% 0.015254s 99% 0.015254s 0.044648s 30% 0.044648s 50% 0.044648s 90% 0.044648s 99% 0.044648s Write: 10% 0.030955s 30% 0.030955s 50% 0.030955s 90% 0.030955s 99% 0.030955s Write: 10% 0.032110s 30% 0.032110s 50% 0.032110s 90% 0.032110s 99% 0.032110s Write: 10% 0.022511s 30% 0.022511s 50% 0.022511s 90% 0.022511s 99% 0.022511s Write: 10% 0.023056s 30% 0.023056s 50% 0.023056s 90% 0.023056s 99% 0.023056s Write: 10% 0.021521s 30% 0.021521s 50% 0.021521s 90% 0.021521s 99% 0.021521s Write: 10% 0.020819s 30% 0.020819s 50% 0.020819s 90% 0.020819s 99% 0.020819s Write: 10% 0.013754s 30% 0.013754s 50% 0.013754s 90% 0.013754s 99% 0.013754s Write: 10% 0.020506s 30% 0.020506s 50% 0.020506s 90% 0.020506s 99% 0.020506s Write: 10% 0.020472s 30% 0.020472s 50% Write: 10% 0.022382s 30% 0.022382s 50% 0.022382s 90% 0.022382s 99% 0.022382s Write: 10% 0.020882s 30% 0.020882s 50% 0.020882s 90% 0.020882s 99% 0.020882s 0.020472s 90% 0.020472s 99% 0.020472s Write: 10% 0.022887s 30% 0.022887s 50% 0.022887s 90% 0.022887s 99% 0.022887s Write: 10% 0.017572s 30% 0.017572s 50% 0.017572s 90% 0.017572s 99% 0.017572s Write: 10% 0.014904s 30% 0.014904s 50% 0.014904s 90% 0.014904s 99% 0.014904s Write: 10% 0.008269s 30% 0.008269s 50% 0.008269s 90% 0.008269s 99% 0.008269s Write: 10% 0.026687s 30% 0.026687s 50% 0.026687s 90% 0.026687s 99% 0.026687s Write: 10% 0.023363s 30% 0.023363s 50% 0.023363s 90% 0.023363s 99% 0.023363s Write: 10% 0.023710s 30% 0.023710s 50% 0.023710s 90% 0.023710s 99% 0.023710s Write: 10% 0.018196s 30% 0.018196s 50% 0.018196s 90% 0.018196s 99% 0.018196s Write: 10% 0.018208s 30% 0.018208s 50% 0.018208s 90% 0.018208s 99% 0.018208s Step 2. read write Write: 10% 0.011066s 30% 0.011066s 50% 0.011066s 90% 0.011066s 99% 0.011066s Write: 10% 0.014285s 30% 0.014285s 50% 0.014285s 90% 0.014285s 99% 0.014285s Write: 10% 0.014590s 30% 0.014590s 50% 0.014590s 90% 0.014590s 99% 0.014590s Write: 10% 0.031237sWrite: 10% 0.029041s 30% 0.029041s 50% 0.029041s 30% 0.031237s 50% 0.031237s 90% 0.031237s 99% 0.031237s 90% 0.029041s 99% 0.029041s Write: 10% 0.029478s 30% 0.029478s 50% 0.029478s 90% 0.029478s 99% 0.029478s Write: 10% 0.029746s 30% 0.029746s 50% 0.029746s 90% 0.029746s 99% 0.029746s Write: 10% 0.030829s 30% 0.030829s 50% 0.030829s 90% 0.030829s 99% 0.030829s Write: 10% 0.032328s 30% 0.032328s 50% 0.032328s 90% 0.032328s 99% 0.032328s Write: 10% 0.031998s 30% 0.031998s 50% 0.031998s 90% 0.031998s 99% 0.031998s Write: 10% Write0.035241s 30% 0.035241s 50% 0.035241s 90% 0.035241s 99% 0.035241s : 10% 0.035087s 30% 0.035087s 50% 0.035087s 90% 0.035087s 99% 0.035087s Write: 10% Write: 10% 0.035087s 30% 0.035087s 50% 0.035087s 90% 0.035087s 99% 0.035087s 0.036302s 30% 0.036302s 50% 0.036302s 90% 0.036302s 99% 0.036302s Write: 10% 0.035663s 30% 0.035663s 50% 0.035663s 90% 0.035663s 99% 0.035663s Write: 10% 0.038852s 30% 0.038852s 50% 0.038852s 90% 0.038852s 99% 0.038852s Write: 10% 0.041824s 30% 0.041824s 50% 0.041824s 90% 0.041824s 99% 0.041824s Write: 10% 0.041730s 30% 0.041730s 50% 0.041730s 90% 0.041730s 99% 0.041730s Write: 10% 0.041520s 30% 0.041520s 50% 0.041520s 90% 0.041520s 99% 0.041520s Write: 10% 0.044164s 30% 0.044164s 50% 0.044164s 90% 0.044164s 99% 0.044164s Write: 10% 0.043369s 30% 0.043369s 50% 0.043369s 90% 0.043369s 99% 0.043369s Write: 10% 0.043683s 30% 0.043683s 50% 0.043683s 90% 0.043683s 99% 0.043683s Write: 10% 0.045134s 30% 0.045134s 50% 0.045134s 90% 0.045134s 99% 0.045134s Write: 10% 0.044708s 30% 0.044708s 50% 0.044708s 90% 0.044708s 99% 
0.044708s Write: 10% 0.044072s 30% 0.044072s 50% 0.044072s 90% 0.044072s 99% 0.044072s Write: 10% 0.043847s 30% 0.043847s 50% 0.043847s 90% 0.043847s 99% 0.043847s Write: 10% Write: 10% 0.044990s 30% 0.043158s 30% 0.043158s 50% 0.043158s 90% 0.043158s 99% 0.043158s 0.044990s 50% 0.044990s 90% 0.044990s 99% 0.044990s Write: 10% 0.044826s 30% 0.044826s 50% 0.044826s 90% 0.044826s 99% 0.044826s Write: 10% 0.045554s 30% 0.045554s 50% 0.045554s 90% 0.045554s 99% 0.045554s Write: 10% 0.047502s 30% 0.047502s 50% 0.047502s 90% 0.047502s 99% 0.047502s Write: 10% Write: 10% 0.048742s0.047914s 30% 0.047914sWrite: 10% 0.045445s 30% 0.045445s 50% 0.045445s 90% 0.045445s 99% 0.045445s 30% Write: 10% 50% 0.048742sWrite 50% 0.048742s 90% 0.048742s: 10% 0.043656s 30% 0.043656s 50% 0.045530s0.047914sWrite: 10% 0.044892s 30% 0.044892s 30% 90% 0.047914s 99% 0.047914s0.045530sWrite: 10% 0.045493s 30% 50% 0.043656s 50% 0.044892s 90% 0.044892s 99% 0.045493s 50% 0.044892s 90% 99% 0.045493s0.048742s0.043656s 99% 0.043656s 0.045530s 90% 0.045530s 99% 0.045530s 90% 0.045493s 99% 0.045493s Write: 10% 0.046443s 30% 0.046443s 50% 0.046443s 90% 0.046443s 99% 0.046443s Write: 10% 0.046831s 30% 0.046831s 50% 0.046831s 90% 0.046831s 99% 0.046831s Write: 10% 0.047569s 30% 0.047569s 50% 0.047569s 90% 0.047569s 99% 0.047569s Write: 10% 0.055795s 30% 0.055795s 50% 0.055795s 90% 0.055795s 99% 0.055795s Write: 10% 0.065921s 30% 0.065921s 50% 0.065921s 90% 0.065921s 99% 0.065921s Write: 10% 0.068707s 30% 0.068707s 50% 0.068707s 90% 0.068707s 99% 0.068707s Write: 10% 0.066973s 30% 0.066973s 50% 0.066973s 90% 0.066973s 99% 0.066973s Write: 10% 0.071262s 30% 0.071262s 50% 0.071262s 90% 0.071262s 99% 0.071262s Write: 10% 0.058840s 30% 0.058840s 50% 0.058840s 90% 0.058840s 99% 0.058840s Write: 10% 0.069269s 30% 0.069269s 50% 0.069269s 90% 0.069269s 99% 0.069269s Write: 10% 0.060083s 30% 0.060083s 50% 0.060083s 90% 0.060083s 99% 0.060083s Write: 10% 0.059059s 30% 0.059059s 50% 0.059059s 90% 0.059059s 99% 0.059059s Write: 10% 0.061862s 30% 0.061862s 50% 0.061862s 90% 0.061862s 99% 0.061862s Write: 10% 0.062999s 30% 0.062999s 50% 0.062999s 90% 0.062999s 99% 0.062999s Write: 10% 0.061147s 30% 0.061147s 50% 0.061147s 90% 0.061147s 99% 0.061147s Write: 10% 0.070706s 30% 0.070706s 50% 0.070706s 90% 0.070706s 99% 0.070706s Write: 10% 0.059760s 30% 0.059760s 50% 0.059760s 90% 0.059760s 99% 0.059760s Write: 10% 0.064175s 30% 0.064175s 50% 0.064175s 90% 0.064175s 99% 0.064175s Write: 10% 0.060896s 30% 0.060896s 50% 0.060896s 90% 0.060896s 99% 0.060896s Write: 10% 0.062808s 30% 0.062808s 50% 0.062808s 90% 0.062808s 99% 0.062808s Write: 10% 0.064525s 30% 0.064525s 50% 0.064525s 90% 0.064525s 99% 0.064525s Write: 10% 0.062884s 30% 0.062884s 50% 0.062884s 90% 0.062884s 99% 0.062884s Write: 10% 0.061316s 30% 0.061316s 50% 0.061316s 90% 0.061316s 99% 0.061316s Write: 10% 0.061962s 30% 0.061962s 50% 0.061962s 90% 0.061962s 99% 0.061962s Write: 10% 0.069513s 30% 0.069513s 50% 0.069513s 90% 0.069513s 99% 0.069513s Write: 10% 0.069962s 30% 0.069962s 50% 0.069962s 90% 0.069962s 99% 0.069962s Read: 10% 0.305980s 30% 0.305980s 50% 0.305980s 90% 0.305980s 99% 0.305980s Step 3. 
write modify Write: 10% 0.011500s 30% 0.011500s 50% 0.011500s 90% 0.011500s 99% 0.011500s Write: 10% 0.013765s 30% 0.013765s 50% 0.013765s 90% 0.013765s 99% 0.013765s Write: 10% 0.017714s 30% 0.017714s 50% 0.017714s 90% 0.017714s 99% 0.017714s Write: 10% 0.031300s 30% 0.031300s 50% 0.031300s 90% 0.031300s 99% 0.031300s Write: 10% 0.043203s 30% 0.043203s 50% 0.043203s 90% 0.043203s 99% 0.043203s Write: 10% 0.038903s 30% 0.038903s 50% 0.038903s 90% 0.038903s 99% 0.038903s Write: 10% 0.032413s 30% 0.032413s 50% 0.032413s 90% 0.032413s 99% 0.032413s Write: 10% 0.034210s 30% 0.034210s 50% 0.034210s 90% 0.034210s 99% 0.034210s Write: 10% 0.032034s 30% 0.032034s 50% 0.032034s 90% 0.032034s 99% 0.032034s Write: 10% 0.042267s 30% 0.042267s 50% 0.042267s 90% 0.042267s 99% 0.042267s Write: 10% 0.040136s 30% 0.040136s 50% 0.040136s 90% 0.040136s 99% 0.040136s Write: 10% 0.046336s 30% 0.046336s 50% 0.046336s 90% 0.046336s 99% 0.046336s Write: 10% 0.043348s 30% 0.043348s 50% 0.043348s 90% 0.043348s 99% 0.043348s Write: 10% 0.038490s 30% 0.038490s 50% 0.038490s 90% 0.038490s 99% 0.038490s Write: 10% 0.043607s 30% 0.043607s 50% 0.043607s 90% 0.043607s 99% 0.043607s Write: 10% 0.030507s 30% 0.030507s 50% 0.030507s 90% 0.030507s 99% 0.030507s Write: 10% 0.035137s 30% Write: 10% 0.048447s 30% 0.048447s 50% 0.048447s 90% 0.035137s 50% 0.035137s 90% 0.035137s 99% 0.035137s 0.048447s 99% 0.048447s Write: 10% 0.035230s 30% 0.035230s 50% 0.035230s 90% 0.035230s 99% 0.035230s Write: 10% 0.053157s 30% 0.053157s 50% 0.053157s 90% 0.053157s 99% 0.053157s Write: 10% 0.048997s 30% 0.048997s 50% 0.048997s 90% 0.048997s 99% 0.048997s Write: 10% 0.043080s 30% 0.043080s 50% 0.043080s 90% 0.043080s 99% 0.043080s Write: 10% 0.042909s 30% 0.042909s 50% 0.042909s 90% 0.042909s 99% 0.042909s Write: 10% 0.038072s 30% 0.038072s 50% 0.038072s 90% 0.038072s 99% 0.038072s Write: 10% Write: 10% 0.043835s 30% 0.043835s 50% 0.043835s 90% 0.043835s 99% 0.043835s 0.019883s 30% 0.019883s 50% 0.019883s 90% 0.019883s 99% 0.019883s Write: 10% 0.023311s 30% 0.023311s 50% 0.023311s 90% 0.023311s 99% 0.023311s Write: 10% 0.018865s 30% 0.018865s 50% 0.018865s 90% 0.018865s 99% 0.018865s Write: 10% 0.042221s 30% 0.042221s 50% 0.042221s 90% 0.042221s 99% 0.042221s Write: 10% 0.062890s 30% 0.062890s 50% 0.062890s 90% 0.062890s 99% 0.062890s Write: 10% 0.062550s 30% 0.062550s 50% 0.062550s 90% 0.062550s 99% 0.062550s Write: 10% 0.061404s 30% 0.061404s 50% 0.061404s 90% 0.061404s 99% 0.061404sWrite: 10% 0.062860s 30% 0.062860s 50% 0.062860s 90% 0.062860s 99% 0.062860s Write: 10% 0.061982s 30% 0.061982s 50% 0.061982s 90% 0.061982s 99% 0.061982s Write: 10% 0.087280s 30% 0.087280s 50% 0.087280s 90% 0.087280s 99% 0.087280s Write: 10% 0.073168s 30% 0.073168s 50% 0.073168s 90% 0.073168s 99% 0.073168s Write: 10% 0.071705s 30% 0.071705s 50% 0.071705s 90% 0.071705s 99% 0.071705s Write: 10% 0.074945s 30% 0.074945s 50% 0.074945s 90% 0.074945s 99% 0.074945s Write: 10% 0.090729s 30% 0.090729s 50% 0.090729s 90% 0.090729s 99% 0.090729sWrite: 10% 0.087337s 30% 0.087337s 50% 0.087337s 90% 0.087337s 99% 0.087337s Write: 10% 0.103249s 30% 0.103249s 50% 0.103249s 90% 0.103249s 99% 0.103249s Write: 10% 0.096629s 30% 0.096629s 50% 0.096629s 90% 0.096629s 99% 0.096629s Write: 10% 0.094582s 30% 0.094582s 50% 0.094582s 90% 0.094582s 99% 0.094582s Write: 10% 0.096705s 30% 0.096705s 50% 0.096705s 90% 0.096705s 99% 0.096705s Write: 10% 0.097966s 30% 0.097966s 50% 0.097966s 90% 0.097966s 99% 0.097966s Write: 10% 0.102917s 30% 0.102917s 50% 0.102917s 90% 0.102917s 99% 0.102917s 
Write: 10% 0.109193s 30% 0.109193s 50% 0.109193s 90% 0.109193s 99% 0.109193s Write: 10% 0.130487s 30% 0.130487s 50% 0.130487s 90% 0.130487s 99% 0.130487s Write: 10% 0.096671s 30% 0.096671s 50% 0.096671s 90% 0.096671s 99% 0.096671s Write: 10% 0.098408s 30% 0.098408s 50% 0.098408s 90% 0.098408s 99% 0.098408s Write: 10% 0.096048s 30% 0.096048s 50% 0.096048s 90% 0.096048s 99% Write: 10% 0.100805s 30% 0.100805s 50% 0.100805s 90% 0.100805s 99% 0.100805s Write: 10% 0.096030s 30% 0.096030s 50% 0.096030s 90% 0.096030s 99% 0.096030s 0.096048s Write: 10% 0.101984s 30% 0.101984s 50% 0.101984s 90% 0.101984s 99% 0.101984s Write: 10% 0.106470s 30% 0.106470s 50% 0.106470s 90% 0.106470s 99% 0.106470s Write: 10% 0.108819s 30% 0.108819s 50% 0.108819s 90% 0.108819s 99% 0.108819s Write: 10% 0.098957s 30% 0.098957s 50% 0.098957s 90% 0.098957s 99% 0.098957s Write: 10% 0.101672s 30% 0.101672s 50% 0.101672s 90% 0.101672s 99% 0.101672s Write: 10% Write0.103748sWrite 30% : 10% 0.102602s 30% 0.102602s 50% 0.102602s 90% 0.102602s 99% 0.102602s Write: 10% 0.107912s 30% 0.107912s 50% 0.107912s 90% 0.107912s 99% 0.107912s Write: 10% 0.110240s 30% 0.110240s 50% 0.110240s 90% 0.110240s 99% 0.110240s 0.103748s 50% : 10% Write: 10% 0.101052s0.103748s 90% 30% 0.101052s 50% 0.101052s 90% 0.101052s 99% 0.101052s 0.100934s 30% 0.100934s 50% 0.100934s 90% 0.100934s 99% 0.100934s 0.103748s 99% 0.103748s Update: 10% 0.058966s 30% 0.058966s 50% 0.094913s 90% 0.094913s 99% 0.094913s Step 4. read modify write Write: 10% 0.089671s 30% 0.089671s 50% 0.089671s 90% 0.089671s 99% 0.089671s Write: 10% 0.120786s 30% 0.120786s 50% 0.120786s 90% 0.120786s 99% 0.120786s Write: 10% 0.129982s 30% 0.129982s 50% 0.129982s 90% 0.129982s 99% 0.129982s Write: 10% 0.130841s 30% 0.130841s 50% 0.130841s 90% 0.130841s 99% 0.130841s Write: 10% 0.130577s 30% 0.130577s 50% 0.130577s 90% 0.130577s 99% 0.130577s Write: 10% 0.137361s 30% 0.137361s 50% 0.137361s 90% 0.137361s 99% 0.137361s Write: 10% 0.139065s 30% 0.139065s 50% 0.139065s 90% 0.139065s 99% 0.139065s Write: 10% 0.138360s 30% 0.138360s 50% 0.138360s 90% 0.138360s 99% 0.138360s Write: 10% 0.142868s 30% 0.142868s 50% 0.142868s 90% 0.142868s 99% 0.142868s Write: 10% 0.146360s 30% 0.146360s 50% 0.146360s 90% 0.146360s 99% 0.146360s Write: 10% 0.145809s 30% 0.145809s 50% 0.145809s 90% 0.145809s 99% 0.145809s Write: 10% 0.148028s 30% 0.148028s 50% 0.148028s 90% 0.148028s 99% 0.148028s Write: 10% 0.150062s 30% 0.150062s 50% 0.150062s 90% 0.150062s 99% 0.150062s Write: 10% 0.150249s 30% 0.150249s 50% 0.150249s 90% 0.150249s 99% 0.150249s Write: 10% 0.153368s 30% 0.153368s 50% 0.153368s 90% 0.153368s 99% 0.153368s Write: 10% 0.179680s 30% 0.179680s 50% 0.179680s 90% 0.179680s 99% 0.179680s Write: 10% Write: 10% 0.177590s0.187234s 30% 0.177590s 50% 0.177590s 90% 0.177590s 99% 0.177590s 30% 0.187234s 50% 0.187234s 90% 0.187234s 99% 0.187234s Write: 10% 0.170020s 30% 0.170020s 50% 0.170020s 90% 0.170020s 99% 0.170020s Write: 10% 0.153168s 30% 0.153168s 50% 0.153168s 90% 0.153168s 99% 0.153168s Write: 10% 0.188137s 30% 0.188137s 50% 0.188137s 90% 0.188137s 99% 0.188137s Write: 10% 0.163024s 30% 0.163024s 50% 0.163024s 90% 0.163024s 99% 0.163024s Write: 10% 0.163092s 30% 0.163092s 50% 0.163092s 90% 0.163092s 99% 0.163092s Write: 10% 0.159690s 30% 0.159690s 50% 0.159690s 90% 0.159690s 99% 0.159690s Write: 10% 0.167383s 30% 0.167383s 50% 0.167383s 90% 0.167383s 99% 0.167383s Write: 10% 0.170677s 30% 0.170677s 50% 0.170677s 90% 0.170677s 99% 0.170677s Write: 10% 0.176327s 30% 0.176327s 50% 0.176327s 90% 0.176327s 
99% 0.176327s Write: 10% 0.163901s 30% 0.163901s 50% 0.163901s 90% 0.163901s 99% 0.163901s Write: 10% 0.171662s 30% 0.171662s 50% 0.171662s 90% 0.171662s 99% 0.171662s Write: 10% 0.162017s 30% 0.162017s 50% 0.162017s 90% 0.162017s 99% 0.162017s Write: 10% 0.150912s 30% 0.150912s 50% 0.150912s 90% 0.150912s 99% 0.150912s Write: 10% 0.170448s 30% 0.170448s 50% 0.170448s 90% 0.170448s 99% 0.170448s Write: 10% 0.175107s 30% 0.175107s 50% 0.175107s 90% 0.175107s 99% 0.175107s Write: 10% 0.213414sWrite: 10% 0.173560s 30% 0.173560s 50% 0.173560s 90% 0.173560s 99% 0.173560s Write: 10% 0.171183s 30% 0.171183s 50% 0.171183s 90% 0.171183s 99% 0.171183s 30% 0.213414s 50% 0.213414s 90% 0.213414s 99% 0.213414s Write: 10% 0.169206s 30% 0.169206s 50% 0.169206s 90% 0.169206s 99% 0.169206s Write: 10% 0.172130s 30% 0.172130s 50% 0.172130s 90% 0.172130s 99% 0.172130s Write: 10% 0.166692s 30% 0.166692s 50% 0.166692s 90% 0.166692s 99% 0.166692s Write: 10% 0.175212s 30% 0.175212s 50% 0.175212s 90% 0.175212s 99% 0.175212s Write: 10% 0.167481s 30% 0.167481s 50% 0.167481s 90% 0.167481s 99% 0.167481s Write: 10% 0.120853s 30% 0.120853s 50% 0.120853s 90% 0.120853s 99% 0.120853s Write: 10% 0.175587s 30% 0.175587s 50% 0.175587s 90% 0.175587s 99% 0.175587s Write: 10% Write: 10% 0.159721s 30% 0.159721s 50% 0.159721s 90% 0.159721s 99% 0.159721s 0.174343s 30% 0.174343s 50% 0.174343s 90% 0.174343s 99% 0.174343s Write: 10% 0.137478s 30% 0.137478s 50% 0.137478s 90% 0.137478s 99% 0.137478s Write: 10% 0.158338s 30% 0.158338s 50% 0.158338s 90% 0.158338s 99% 0.158338s Write: 10% 0.159317s 30% 0.159317s 50% 0.159317s 90% 0.159317s 99% 0.159317s Write: 10% 0.132770s 30% 0.132770s 50% 0.132770s 90% 0.132770s 99% 0.132770s Write: 10% 0.161412s 30% 0.161412s 50% 0.161412s 90% 0.161412s 99% 0.161412s Write: 10% 0.159894s 30% 0.159894s 50% 0.159894s 90% 0.159894s 99% 0.159894s Write: 10% 0.133972s 30% 0.133972s 50% 0.133972s 90% 0.133972s 99% 0.133972s Write: 10% 0.131834s 30% 0.131834s 50% 0.131834s 90% 0.131834s 99% 0.131834s Write: 10% 0.130283s 30% 0.130283s 50% 0.130283s 90% 0.130283s 99% 0.130283s Write: 10% 0.129691s 30% 0.129691s 50% 0.129691s 90% 0.129691s 99% 0.129691s Write: 10% 0.132773s 30% 0.132773s 50% 0.132773s 90% 0.132773s 99% 0.132773s Write: 10% 0.132698s 30% 0.132698s 50% 0.132698s 90% 0.132698s 99% 0.132698s Write: 10% 0.135473s 30% 0.135473s 50% 0.135473s 90% 0.135473s 99% 0.135473s Write: 10% 0.133265s 30% 0.133265s 50% 0.133265s 90% 0.133265s 99% 0.133265s Write: 10% 0.137302s 30% 0.137302s 50% 0.137302s 90% 0.137302s 99% 0.137302s Write: 10% 0.136106s 30% 0.136106s 50% 0.136106s 90% 0.136106s 99% 0.136106s Write: 10% 0.135709s 30% 0.135709s 50% 0.135709s 90% 0.135709s 99% 0.135709s Write: 10% 0.137859s 30% 0.137859s 50% 0.137859s 90% 0.137859s 99% 0.137859s Write: 10% 0.140553s 30% 0.140553s 50% 0.140553s 90% 0.140553s 99% 0.140553s Update: 10% 0.046850s 30% 0.046850s 50% 0.137049s 90% 0.137049s 99% 0.137049s Read: 10% 0.557157s 30% 0.557157s 50% 0.557157s 90% 0.557157s 99% 0.557157s |98.1%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest |98.1%| [TM] {RESULT} ydb/tests/olap/high_load/unittest >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal--Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] >> test.py::test[blocks-bitcast_scalar-] [GOOD] >> test.py::test[blocks-combine_all_max_filter-] >> 
test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] >> test.py::test[aggregate-aggregation_by_udf--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--Results] |98.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part1/test-results/pytest/{meta.json ... results_accumulator.log} >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> test.py::test[weak_field-weak_field_opt--Results] [GOOD] >> test.py::test[weak_field-weak_field_rest--ForceBlocks] >> test.py::test[blocks-interval_add_interval_scalar-] [GOOD] >> test.py::test[blocks-json_document_type-] [SKIPPED] >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx-] >> test.py::test[blocks-add_int8--Results] [GOOD] >> test.py::test[blocks-block_input--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input--Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--ForceBlocks] >> test.py::test[action-empty_do-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDataSourceAndTablePathPrefix [GOOD] >> KqpFederatedQuery::ExecuteScriptWithDifferentBindingsMode >> test.py::test[aggregate-group_by_ru_join_agg-] [GOOD] >> test.py::test[aggregate-rollup_with_dict-] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] Test command err: 127.0.0.1 - - [05/May/2025 10:01:38] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 10:01:39] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [05/May/2025 10:01:40] "GET /foo.txt HTTP/1.1" 200 - >> test.py::test[blocks-combine_all_pg_filter-] [GOOD] >> test.py::test[blocks-combine_hashed_some-] |98.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part9/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[join-join_and_distinct_key--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--ForceBlocks] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-flatmap_with_non_struct_out--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-ForceBlocks] |98.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] >> test.py::test[schema-copy-other-ForceBlocks] [GOOD] >> test.py::test[schema-copy-other-Results] >> test.py::test[blocks-bitcast_block-] [GOOD] >> test.py::test[blocks-block_input_sys_columns-] [SKIPPED] >> test.py::test[blocks-combine_hashed_minmax_double-] >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> test.py::test[blocks-bitcast_block--ForceBlocks] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> test.py::test[window-yql-15636-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test.py::test[insert-multiappend_sorted-default.txt-Results] [GOOD] >> test.py::test[insert-unique_distinct_hints--Results] >> test.py::test[pg-tpcds-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt] [GOOD] >> test.py::test[aggregate-compare_by_tuple-] >> test.py::test[blocks-combine_all_max_filter-] [GOOD] >> test.py::test[blocks-combine_all_sum-] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] [GOOD] >> test.py::test[join-premap_common_multiparents-off-ForceBlocks] |98.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.2%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> test.py::test[aggregate-compare_by-] [GOOD] >> test.py::test[aggregate-group_by_column-default.txt] >> test.py::test[action-dep_world_action_quote-default.txt] [GOOD] >> test.py::test[action-subquery_merge1-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bitor-default.txt] [SKIPPED] >> test.py::test[aggr_factory-bottom_by-default.txt] [SKIPPED] >> test.py::test[aggr_factory-log_histogram-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt] >> test.py::test[action-empty_do-default.txt] [GOOD] >> test.py::test[action-eval_folder_via_file_in_job-] [SKIPPED] >> test.py::test[action-eval_table_with_view-default.txt] [SKIPPED] >> test.py::test[action-runtime_if_select-default.txt] [SKIPPED] >> test.py::test[aggr_factory-list-] >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> test.py::test[aggr_factory-list-] [SKIPPED] >> test.py::test[aggr_factory-mode-default.txt] [SKIPPED] >> test.py::test[aggregate-agg_phases_table2-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregation_with_named_node-] >> TopicSessionTests::BadDataSessionError >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> test.py::test[blocks-lazy_nonstrict_with_scalar_ctx-] [GOOD] >> 
test.py::test[column_group-hint_append2-] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail-] >> test.py::test[blocks-combine_hashed_some-] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-Results] >> test.py::test[schema-copy-other-Results] [GOOD] >> test.py::test[schema-copy-yamred_dsv_raw-ForceBlocks] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--ForceBlocks] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[tpch-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q10-default.txt-Results] >> test.py::test[blocks-block_input_various_types--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types--Results] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail-] [SKIPPED] >> test.py::test[column_order-insert_reorder_without_columnorder-] [SKIPPED] >> test.py::test[column_order-select_limit_offset_reorder-default.txt] >> test.py::test[blocks-block_input_various_types_2-v3-ForceBlocks] [SKIPPED] >> test.py::test[blocks-combine_hashed_sum-] >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/scheme.pb" |98.2%| [TM] {RESULT} ydb/tests/functional/backup/unittest >> test.py::test[blocks-combine_hashed_minmax_double-] [GOOD] >> test.py::test[blocks-date_equals-] >> test.py::test[aggregate-rollup_with_dict-] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Results] >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> test.py::test[blocks-date_less_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] |98.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> test.py::test[aggregate-group_by_column-default.txt] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping-] >> test.py::test[aggr_factory-bitand-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-insert_after_eval-] [SKIPPED] >> test.py::test[action-parallel_for-default.txt] [SKIPPED] >> test.py::test[action-process_from_subquery_with_orderby-default.txt] [SKIPPED] >> test.py::test[action-subquery_opt_args-default.txt] [SKIPPED] >> test.py::test[aggr_factory-linear_histogram-default.txt] [SKIPPED] >> test.py::test[aggregate-compare_by_nulls-default.txt] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] >> test.py::test[weak_field-weak_field_rest--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_rest--Results] >> test.py::test[aggr_factory-bitand-default.txt-Results] >> test.py::test[blocks-date_greater_scalar--Results] [GOOD] >> test.py::test[blocks-date_group_by--Results] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-ForceBlocks] 
[GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] >> ServerRestartTest::RestartOnGetSession [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test.py::test[column_order-select_limit_offset_reorder-default.txt] [GOOD] >> test.py::test[count-count_distinct_from_view_concat-] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix-] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail-] [SKIPPED] >> test.py::test[expr-non_persistable_order_by_fail-] [SKIPPED] >> test.py::test[file-parse_file_in_select_as_uint64-] [SKIPPED] >> test.py::test[flatten_by-flatten_dict-] >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> test.py::test[blocks-combine_all_sum-] [GOOD] >> test.py::test[blocks-date_greater_scalar-] >> test.py::test[aggregate-aggregation_with_named_node-] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter-] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] >> test.py::test[action-eval_column-] >> KqpTpch::Query01 >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] [SKIPPED] >> test.py::test[join-pullup_exclusion-off-ForceBlocks] >> test.py::test[aggregate-compare_by_tuple-] [GOOD] >> test.py::test[aggregate-group_by_hop-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_bad_delay-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_star-] [SKIPPED] >> test.py::test[aggregate-group_by_session_distinct_compact-] >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-lambda--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_nested_right--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] >> test.py::test[weak_field-weak_field_rest--Results] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--ForceBlocks] |98.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value--Results] [GOOD] >> test.py::test[aggregate-error_type--Results] >> test.py::test[blocks-date_equals-] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar-] >> test.py::test[schema-copy-yamred_dsv_raw-ForceBlocks] [GOOD] >> test.py::test[schema-copy-yamred_dsv_raw-Results] >> test.py::test[blocks-combine_hashed_sum-] [GOOD] >> test.py::test[blocks-interval_add_date-] >> test.py::test[join-premap_common_multiparents-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi--ForceBlocks] >> KqpFederatedQuery::ExecuteScriptWithDifferentBindingsMode [GOOD] >> KqpFederatedQuery::MultiStatementSelect >> test.py::test[pg-tpch-q09-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all-default.txt-ForceBlocks] [SKIPPED] |98.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] >> test.py::test[produce-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_subfields--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> TopicSessionTests::BadDataSessionError [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> TopicSessionTests::WrongFieldType >> test.py::test[window-lagging/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-lagging/aggregations--Results] >> test.py::test[aggregate-aggregate_subquery_yql_15869-default.txt] [GOOD] >> test.py::test[aggregate-aggregation_by_udf-] >> test.py::test[blocks-combine_all_sum_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] >> test.py::test[aggr_factory-bitand-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--ForceBlocks] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> test.py::test[blocks-combine_all_avg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg--Results] |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.3%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |98.3%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[bigdate-table_arithmetic_narrow-default.txt] [GOOD] >> test.py::test[blocks-combine_all_count-] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> test.py::test[schema-copy-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-ForceBlocks] >> test.py::test[action-dep_world_quote_code-default.txt] >> test.py::test[action-eval_column-] [GOOD] >> test.py::test[action-eval_input_output_table-] >> test.py::test[action-dep_world_quote_code-default.txt] [SKIPPED] >> test.py::test[action-eval_for-default.txt] [SKIPPED] >> test.py::test[action-eval_typeof_output_table-] [SKIPPED] >> test.py::test[aggr_factory-avg_if-default.txt] [SKIPPED] >> test.py::test[aggr_factory-booland-default.txt] [SKIPPED] >> test.py::test[aggr_factory-count_if-default.txt] [SKIPPED] >> test.py::test[aggr_factory-min-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_session_distinct_compact-] [GOOD] >> test.py::test[aggregate-having_cast-default.txt] >> test.py::test[action-eval_input_output_table-] [SKIPPED] >> test.py::test[action-unwrap_runtime_fail_with_column_message-] [SKIPPED] >> test.py::test[aggr_factory-max-default.txt] [SKIPPED] >> test.py::test[aggr_factory-some-default.txt] [SKIPPED] >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> test.py::test[aggregate-group_by_cube_grouping-] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup-] >> test.py::test[aggr_factory-variance-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt] >> test.py::test[aggregate-count_distinct_with_filter-] [GOOD] >> test.py::test[aggregate-group_by_hop_compact-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_start-] [SKIPPED] >> test.py::test[aggregate-group_by_ru_join_simple-] |98.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> test.py::test[aggr_factory-min_by-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_expr_alias_on_subexp-] >> test.py::test[flatten_by-flatten_dict-] [GOOD] >> test.py::test[hor_join-skip_sampling-] [SKIPPED] >> test.py::test[in-in_exists_immediate_nested_subq-] >> test.py::test[aggregate-error_type--Results] [GOOD] >> test.py::test[aggregate-group_by_expr--Results] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_tuple--ForceBlocks] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> test.py::test[insert-unique_distinct_hints--Results] [GOOD] >> test.py::test[insert-values_subquery--Results] [SKIPPED] >> test.py::test[insert_monotonic-keep_unique--Results] [SKIPPED] >> test.py::test[insert_monotonic-to_empty--Results] >> test.py::test[aggregate-compare_by_nulls-default.txt] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt] >> KqpFederatedQuery::MultiStatementSelect [GOOD] >> KqpFederatedQuery::InsertIntoBucket >> test.py::test[weak_field-weak_field_in_group_by--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar-] [GOOD] >> test.py::test[blocks-decimal_op_decimal-] [SKIPPED] >> test.py::test[blocks-decimal_unary-] [SKIPPED] >> test.py::test[blocks-distinct_pure_all-] >> test.py::test[weak_field-weak_field_long_name--Results] >> 
test.py::test[blocks-combine_all_sum_filter_opt--Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--ForceBlocks] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_over_input_desc--ForceBlocks] >> test.py::test[blocks-combine_all_count-] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop-] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] [GOOD] >> test.py::test[join-left_only_semi_and_other-off-ForceBlocks] >> test.py::TestSqsSplitMergeFifoTables::test_fifo_merge_split >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> KqpFederatedQuery::InsertIntoBucket [GOOD] >> KqpFederatedQuery::InsertIntoBucketWithSelect >> test.py::test[aggregate-group_by_expr_lookup-] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join-] >> test.py::test[blocks-date_greater_or_equal_scalar-] [GOOD] >> test.py::test[blocks-distinct_pure_keys-] >> test.py::test[aggregate-having_cast-default.txt] [GOOD] >> test.py::test[blocks-add_int8-] >> test.py::test[aggregate-aggregation_by_udf-] [GOOD] >> test.py::test[aggregate-avg_and_sum-default.txt] >> test.py::test[pg_catalog-lambda--ForceBlocks] [GOOD] >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--ForceBlocks] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[blocks-interval_add_date-] [GOOD] >> test.py::test[blocks-interval_add_interval-] >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-ForceBlocks] |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq-] [GOOD] >> test.py::test[in-in_sorted-] >> TDqSolomonWriteActorTest::TestWriteFormat |98.3%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 |98.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.3%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> test.py::test[blocks-date_sub_interval--Results] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] >> test_break.py::test_create_minidump >> test.py::test[join-pullup_exclusion-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty-] >> test.py::test[aggregate-group_by_expr_alias_on_subexp-] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr-] >> test.py::test[join-premap_common_semi--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_semi--Results] >> TopicSessionTests::WrongFieldType [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> test.py::test[blocks-date_group_by--Results] [GOOD] >> test.py::test[blocks-date_sub_scalar--Results] >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q33-default.txt-Results] >> test.py::test[blocks-filter_by_column_with_drop-] [GOOD] >> test.py::test[blocks-struct_type-] >> 
test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--ForceBlocks] >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> test.py::test[blocks-add_int8-] [GOOD] >> test.py::test[blocks-add_uint8-] >> test.py::test[aggregate-group_by_expr_only_join-] [GOOD] >> test.py::test[aggregate-group_by_gs_duo-] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt] [GOOD] >> test.py::test[aggregate-compact_distinct-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_ru_join_simple-] [GOOD] >> test.py::test[blocks-interval_add_interval-] [GOOD] >> test.py::test[blocks-sort_one_asc-] >> test.py::test[aggregate-group_by_mul_ru_ru-] >> KqpFederatedQuery::InsertIntoBucketWithSelect [GOOD] >> KqpFederatedQuery::InsertIntoBucketCaching >> test.py::test[aggregate-no_compact_distinct-] [SKIPPED] >> test.py::test[aggregate-percentile_and_avg_grouped-] >> test.py::test[aggregate-agg_filter_pushdown--ForceBlocks] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--Results] >> test.py::test[pg_catalog-lambda--Results] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-to_empty--Results] [GOOD] >> test.py::test[join-aggr_diff_order-default.txt-Results] >> test.py::test[blocks-distinct_pure_all-] [GOOD] >> test.py::test[blocks-not-] >> test.py::test[in-in_sorted-] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt] |98.3%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/test-results/pytest/{meta.json ... results_accumulator.log} |98.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[blocks-distinct_pure_keys-] [GOOD] >> test.py::test[blocks-filter_partial_expr-] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] >> SdkCredProvider::PingFromProviderSyncDiscovery >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test.py::test[schema-insert_sorted-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test.py::test[aggregate-group_by_expr_order_by_expr-] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping-] >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test.py::test[aggregate-avg_and_sum-default.txt] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value-] >> KqpFederatedQuery::InsertIntoBucketCaching [GOOD] >> KqpFederatedQuery::InsertIntoBucketValuesCast >> test.py::test[aggregate-group_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--Results] >> test.py::test[blocks-struct_type-] [GOOD] >> test.py::test[column_group-groups-lookup] >> test.py::test[blocks-distinct_pure_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] >> test.py::test[window-lagging/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--Results] >> test_workload.py::TestYdbKvWorkload::test[row] >> test.py::test[aggregate-compare_tuple--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] >> test.py::test[join-premap_common_semi--Results] [GOOD] >> test.py::test[join-premap_common_semi-off-ForceBlocks] >> test.py::test[blocks-sort_one_asc-] [GOOD] >> 
test.py::test[blocks-sort_two_asc-] >> test.py::test[order_by-assume_over_input_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_over_input_desc--Results] >> test.py::test[action-eval_atom_wrong_type_expr-] [SKIPPED] >> test.py::test[action-eval_folder-] [SKIPPED] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> test.py::test[aggregate-group_by_gs_few_empty-] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt] >> test.py::test[aggregate-group_by_gs_duo-] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru-] >> test.py::test[aggregate-percentile_and_avg_grouped-] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr-] >> test.py::test[action-eval_folder_via_file-] [SKIPPED] >> test.py::test[action-eval_regexp-] [SKIPPED] >> test.py::test[aggregate-compare_tuple-] |98.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[in-in_types_cast_all-default.txt] [GOOD] >> KqpFederatedQuery::InsertIntoBucketValuesCast [GOOD] >> KqpFederatedQuery::UpdateExternalTable >> test.py::test[aggregate-agg_filter_pushdown--Results] [GOOD] >> test.py::test[insert-append-] [SKIPPED] >> test.py::test[join-left_only_semi_and_other-off-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-ForceBlocks] >> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[insert-append_view_fail-] [SKIPPED] >> test.py::test[insert_monotonic-from_empty-] [SKIPPED] >> test.py::test[blocks-filter_partial_expr-] [GOOD] >> test.py::test[schema-limit_simple--ForceBlocks] >> test.py::test[insert_monotonic-non_existing_fail-] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted--Results] >> test.py::test[blocks-if-] >> KqpTpch::Query07 [GOOD] >> test.py::test[join-do_not_suppres_equijoin_input_sorts-] [SKIPPED] >> test.py::test[join-equi_join_two_mult_keys-off] [SKIPPED] >> test.py::test[join-full_trivial_udf_call-] [SKIPPED] >> test.py::test[join-inner_trivial_from_concat-] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-] >> KqpTpch::Query08 >> test.py::test[pg-tpcds-q33-default.txt-Results] [GOOD] >> test_break.py::test_create_minidump [GOOD] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> test_break.py::test_minidump_script >> test.py::test[blocks-add_uint8-] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> test.py::test[blocks-block_input-aux_columns] [SKIPPED] >> test.py::test[blocks-combine_all_count_filter_opt-] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] [GOOD] >> KqpFederatedQuery::UpdateExternalTable [GOOD] >> KqpFederatedQuery::JoinTwoSources >> test.py::test[blocks-minmax_strings_filter--Results] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-div_uint64--ForceBlocks] >> test.py::test[solomon-HistResponse-default.txt] >> test.py::test[blocks-combine_hashed_count_filter--Results] >> test.py::test[blocks-not-] [GOOD] >> test.py::test[blocks-pg_to_interval-] >> test.py::test[order_by-assume_over_input_desc--Results] [GOOD] |98.4%| [TM] {default-linux-x86_64, pic, 
relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[aggregate-compare_tuple-] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru-] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo-] >> test.py::test[aggregate-group_by_mul_gs_ru-] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind-] >> test.py::test[aggregate-group_by_gs_grouping-] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct_compact-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gb_ru-] >> test.py::test[aggregate-group_by_expr_semi_join-] >> test.py::test[produce-process_row_and_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_asc-] [GOOD] >> test.py::test[column_group-groups-lookup] [GOOD] >> test.py::test[join-join_key_cmp_udf-] [GOOD] >> KqpFederatedQuery::JoinTwoSources [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_by_value-] [GOOD] >> test.py::test[window-lagging/aggregations_leadlag--Results] [GOOD] >> test.py::test[blocks-if-] [GOOD] >> test.py::test[blocks-pg_to_interval-] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt-] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr-] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> test.py::test[blocks-combine_all_decimal-] [SKIPPED] >> test.py::test[blocks-date_sub_interval_scalar-] >> test.py::test[produce-process_row_and_columns-default.txt-Results] >> test.py::test[join-join_with_duplicate_keys_on_sorted-] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] >> test.py::test[aggregate-histogram_cdf-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-ForceBlocks] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPartitionedBy >> test.py::test[aggregate-group_by_mul_gb_ru-] [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join-] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Results] [GOOD] >> test.py::test[join-premap_common_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_semi-off-Results] [SKIPPED] >> test.py::test[join-pullup_extend--ForceBlocks] >> test.py::test[distinct-distinct_count_no_gouping-default.txt] >> test.py::test[aggregate-group_by_gs_subselect_asterisk-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_session_aliases-] >> test.py::test[produce-process_row_and_columns-default.txt-Results] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar-] >> test.py::test[bigdate-table_arithmetic_sub-default.txt] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join-] >> KqpTpch::Query08 [GOOD] >> KqpFederatedQuery::ExecuteScriptWithExternalTableResolveCheckPartitionedBy [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--ForceBlocks] >> test.py::test[case-case_multi_val-default.txt] >> test.py::test[column_group-groups-max] >> test.py::test[join-join_with_duplicate_keys_on_sorted-] [GOOD] >> test.py::test[aggregate-group_by_session_aliases-] [GOOD] >> 
test.py::test[schema-limit_simple--ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_rows_sorted_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[join-aggr_diff_order-default.txt-Results] [GOOD] >> test_break.py::test_minidump_script [GOOD] >> test_break.py::test_minidump_script_args >> test.py::test[aggregate-group_by_rollup_duo-] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar-] [GOOD] >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt] >> test.py::test[blocks-exists-] >> test.py::test[aggregate-group_by_gs_alt_duo-] |98.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[order_by-assume_over_input_desc--Results] [GOOD] >> KqpTpch::Query09 >> test.py::test[binding-table_filter_strict_binding-default.txt] >> test.py::test[blocks-date_less--ForceBlocks] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] >> test.py::test[join-alias_where_group-off-Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_udf-] >> test.py::test[distinct-distinct_count_no_gouping-default.txt] [GOOD] >> test.py::test[join-anyjoin_common_dup-off-Results] [SKIPPED] >> test.py::test[bigdate-table_arithmetic_sub-default.txt] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--Results] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join-] [GOOD] >> test.py::test[schema-limit_simple--Results] >> test.py::test[join-lookupjoin_inner-] >> KqpFederatedQuery::ExecuteScriptWithEmptyCustomPartitioning >> test.py::test[column_group-groups-max] [GOOD] >> TopicSessionTests::ReadNonExistentTopic >> test.py::test[binding-table_filter_strict_binding-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_with_udf_by_aggregate-] [SKIPPED] >> test.py::test[distinct-distinct_union_all-default.txt] >> test.py::test[blocks-block_input_various_types-v3] [SKIPPED] >> test.py::test[aggregate-group_by_cube_duo-] >> test.py::test[column_group-hint-disable] [SKIPPED] >> test.py::test[aggregate-native_desc_group_compact_by-] [SKIPPED] >> test.py::test[blocks-filter_direct_col-] >> test.py::test[aggregate-group_by_gs_alt_duo-] [GOOD] >> test.py::test[blocks-interval_sub_interval_scalar-] [GOOD] >> test.py::test[blocks-div_uint64--ForceBlocks] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithEmptyCustomPartitioning [GOOD] >> test.py::test[action-action_eval_cluster_use-] [SKIPPED] >> test.py::test[blocks-combine_all_minmax_double-] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt] >> test.py::test[column_group-hint_anon_groups-disable] [SKIPPED] >> test.py::test[column_group-hint_append_fail-] [SKIPPED] >> test.py::test[column_order-select_groupby_with_star-default.txt] >> test.py::test[bigdate-table_yt_key_filter-on] >> test.py::test[column_group-hint_diff_grp_fail-] [SKIPPED] >> test.py::test[blocks-div_uint64--Results] >> test.py::test[bigdate-table_yt_key_filter-on] [SKIPPED] >> KqpFederatedQuery::ExecuteScriptWithTruncatedMultiplyResults >> test.py::test[action-eval_drop-] [SKIPPED] >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test.py::test[action-eval_skip_take-] >> test.py::test[case-case_multi_val-default.txt] [GOOD] |98.4%| [TM] {default-linux-x86_64, pic, 
relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail-] [SKIPPED] >> test.py::test[pg-tpcds-q77-default.txt-Results] >> test.py::test[bigdate-tz_table_fill-] [SKIPPED] >> test.py::test[column_order-align_publish_native-] [SKIPPED] >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[column_group-hint_diff_grp_fail2-] [SKIPPED] >> KqpTpch::Query09 [GOOD] >> test.py::test[blocks-exists-] [GOOD] >> test.py::test[count-count_all_grouped-empty] >> test.py::test[schema-select_all_inferschema--ForceBlocks] >> test.py::test[blocks-block_input_various_types_2-v3] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail-] [SKIPPED] >> KqpTpch::Query10 >> test.py::test[blocks-interval_mul-] >> test.py::test[column_group-hint_non_map_yson_fail-] [SKIPPED] >> test.py::test[blocks-date_less-] >> test.py::test[column_group-min_group-default.txt] >> test.py::test[join-lookupjoin_inner-] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-] >> test.py::test[count-count_all_grouped-empty] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf-] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--Results] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt] [SKIPPED] >> test.py::test[blocks-filter_direct_col-] [GOOD] >> test.py::test[aggregate-percentiles_containers-] >> test.py::test[distinct-distinct_union_all-default.txt] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt] >> test.py::test[blocks-filter_expr-] >> test.py::test[blocks-combine_all_minmax_double-] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt-] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> test_ydb_backup.py::TestClusterBackupRestore::test_cluster_backup_restore [GOOD] >> test.py::test[aggregate-group_by_cube_duo-] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-ForceBlocks] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> test.py::test[aggregate-group_by_expr_and_having-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] [GOOD] >> test.py::test[action-eval_skip_take-] [GOOD] >> test.py::test[action-pending_arg_fail-] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys-] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithTruncatedMultiplyResults [GOOD] >> test.py::test[blocks-date_less-] [GOOD] >> test.py::test[blocks-div_uint64--Results] [GOOD] >> KqpTpch::Query10 [GOOD] >> BulkUpsert::BulkUpsert [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[agg_apply-table-] >> test.py::test[aggregate-group_by_ru_join_qualified-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt] [GOOD] >> test_example.py::TestExample::test_example >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--ForceBlocks] >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> 
test.py::test[column_group-min_group-default.txt] [GOOD] >> test.py::test[aggregate-percentiles_containers-] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt-] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul-] [GOOD] >> test.py::test[column_group-hint_anon_groups-single] [SKIPPED] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD] >> test_break.py::test_minidump_script_args [GOOD] >> test_break.py::test_compatibility_info >> test.py::test[blocks-minmax_strings_filter--Results] [GOOD] >> test.py::test[join-pullup_extend--ForceBlocks] [GOOD] >> test.py::test[column_order-select_groupby_with_star-default.txt] [GOOD] >> test.py::test[blocks-filter_expr-] [GOOD] >> test.py::test[schema-select_all_inferschema--ForceBlocks] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having-] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt] >> test.py::test[join-mergejoin_force_align3-] [SKIPPED] >> test.py::test[window-current/session_extended--Results] >> test.py::test[join-mergejoin_with_table_range-] >> test.py::test[agg_apply-table-] [SKIPPED] >> KqpTpch::Query11 >> test.py::test[aggregate-table_row_aggregation-default.txt] >> test.py::test[column_group-hint_non_str_yson_fail-] [SKIPPED] >> TopicSessionTests::SlowSession >> test.py::test[column_order-select_where-default.txt] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] >> test.py::test[case-case_then_else-default.txt] >> test.py::test[blocks-minmax_strings-] >> test.py::test[count-count_all_view_concat-] [SKIPPED] >> test.py::test[datetime-date_tz_table_sort_asc-] >> test_break.py::test_compatibility_info [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct-] [SKIPPED] >> test.py::test[count-count_all_grouped-] >> test.py::test[blocks-mod_uint64_opt2--ForceBlocks] >> test.py::test[blocks-interval_mul_scalar-] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column-] >> KqpFederatedQuery::ForbiddenCallablesForYdbTables >> test.py::test[join-mergejoin_with_table_range-] [SKIPPED] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] >> test.py::test[aggr_factory-histogram-default.txt] [SKIPPED] >> test.py::test[aggr_factory-logariphmic_histogram-default.txt] [SKIPPED] >> test.py::test[aggr_factory-median-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_list_in_key-default.txt] [SKIPPED] >> KqpTpch::Query11 [GOOD] >> test.py::test[aggregate-avg_and_sum_float-] >> test.py::test[aggregate-group_by_ru_with_select_distinct-] >> test.py::test[flatten_by-flatten_by_typed_table-] >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] >> test.py::test[datetime-date_tz_table_sort_asc-] [SKIPPED] >> test.py::test[column_order-select_where-default.txt] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[schema-select_all_inferschema--Results] >> test.py::test[blocks-not_opt--Results] >> test.py::test[aggregate-group_by_gs_with_rollup--ForceBlocks] >> test.py::test[hor_join-fuse_multi_outs2-outlimit] [SKIPPED] >> test.py::test[pg-tpcds-q98-default.txt-Results] >> test.py::test[aggregate-avg_and_sum_float-] [GOOD] >> test.py::test[join-mergejoin_with_table_range-off] 
[SKIPPED] >> test.py::test[join-opt_on_opt_side-off] [SKIPPED] >> test.py::test[join-premap_map_cross-] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off] [SKIPPED] >> test.py::test[join-pullup_inner-off] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left-] >> test.py::test[aggregate-avg_with_having-default.txt] >> test.py::test[distinct-distinct_count_and_full_count-default.txt] >> test.py::test[dq-pool_trees_whitelist-] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type-] [SKIPPED] >> test.py::test[epochs-write_and_use_in_same_epoch-] [SKIPPED] >> test.py::test[file-file_list_simple-] [SKIPPED] >> test.py::test[file-where_key_in_file_content_typed-] [SKIPPED] >> test.py::test[flatten_by-flatten_with_join-] >> test.py::test[aggregate-group_by_ru_with_select_distinct-] [GOOD] >> KqpFederatedQuery::ForbiddenCallablesForYdbTables [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLocationWithoutSlashAtTheEnd >> KqpTpch::Query12 >> test.py::test[insert-select_after_insert_relabeled-default.txt] [SKIPPED] >> test.py::test[aggregate-list_with_fold_map-] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column-] [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLocationWithoutSlashAtTheEnd [GOOD] >> KqpFederatedQuery::StreamExecuteScriptWithGenericAutoDetection >> test.py::test[insert-yql-13083-existig] [SKIPPED] >> KqpTpch::Query12 [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt-] >> test.py::test[join-anyjoin_common_dup-off] >> KqpTpch::Query13 >> test.py::test[join-anyjoin_common_dup-off] [SKIPPED] >> KqpTpch::Query13 [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off] [SKIPPED] >> KqpTpch::Query14 >> test.py::test[join-full_equal_not_null-off] [SKIPPED] >> test.py::test[join-grace_join1-off] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off] [SKIPPED] >> test.py::test[join-late_mergejoin_on_empty-] |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.4%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |98.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[join-pullup_extend--ForceBlocks] [GOOD] |98.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/py3test >> test_break.py::test_compatibility_info [GOOD] |98.4%| [TM] {RESULT} ydb/tests/functional/minidumps/py3test >> S3PathStyleBackup::DisableVirtualAddressing >> test.py::test[aggregate-table_row_aggregation-default.txt] [GOOD] >> test.py::test[bigdate-tz_table_pull-] >> test.py::test[distinct-distinct_count_and_full_count-default.txt] [GOOD] >> test.py::test[epochs-read_modified-] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail-] [SKIPPED] >> test.py::test[file-where_key_in_get_file_content-] [SKIPPED] >> test.py::test[flatten_by-flatten_by_opt_dict-] >> test.py::test[flatten_by-flatten_by_typed_table-] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by-] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |98.5%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--ForceBlocks] >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> test.py::test[count-count_all_grouped-] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt] [SKIPPED] >> test.py::test[expr-yql-10180-default.txt] [SKIPPED] >> test.py::test[hor_join-filters-] >> test.py::test[case-case_then_else-default.txt] [GOOD] >> test.py::test[count-count_nullable-] >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-ForceBlocks] >> test.py::test[join-pushdown_filter_over_left-] [GOOD] >> test.py::test[join-star_join-] >> KqpFederatedQuery::StreamExecuteScriptWithGenericAutoDetection [GOOD] >> KqpFederatedQuery::ExecuteScriptWithGenericAutoDetection >> test.py::test[join-late_mergejoin_on_empty-] [GOOD] >> test.py::test[join-left_null_literal-] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-] >> test.py::test[blocks-minmax_strings-] [GOOD] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_scalar--Results] >> test.py::test[blocks-partial_blocks1-] >> test.py::test[aggregate-group_by_expr_only_join--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--Results] >> test.py::test[blocks-mod_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] >> test.py::test[blocks-interval_mul_scalar-] [GOOD] >> test.py::test[blocks-mod_uint64_opt2-] >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] >> test_example.py::TestExample::test_linked_with_testcase >> test_example.py::TestExample::test_linked_with_testcase [GOOD] >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> test.py::test[aggregate-group_by_full_path-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_distinct-] >> test.py::test[aggregate-list_with_fold_map-] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt] >> test.py::test[aggregate-avg_with_having-default.txt] [GOOD] >> 
test.py::test[aggregate-group_by_hop_zero_delay-] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_key_check-] >> test.py::test[flatten_by-flatten_by_opt_dict-] [GOOD] >> test.py::test[flatten_by-flatten_one_field_another-] >> KqpFederatedQuery::ExecuteScriptWithGenericAutoDetection [GOOD] >> KqpFederatedQuery::ExplainScriptWithGenericAutoDetection |98.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[blocks-not_opt--Results] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> test.py::test[flatten_by-flatten_with_join-] [GOOD] >> test.py::test[hor_join-double_input-default.txt] >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test.py::test[bigdate-tz_table_pull-] [GOOD] >> test.py::test[blocks-add_uint64_opt2-] >> test.py::test[blocks-mod_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-not--ForceBlocks] >> test.py::test[flatten_by-flatten_list_on_flatten_by-] [GOOD] >> test.py::test[hor_join-group_yamr-] [SKIPPED] >> test.py::test[hor_join-skip_yamr-] [SKIPPED] >> test.py::test[insert-select_operate_with_columns-] [SKIPPED] >> test.py::test[insert-trivial_literals-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q98-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] >> test.py::test[blocks-partial_blocks1-] [GOOD] >> test.py::test[blocks-pg_call-] [SKIPPED] >> test.py::test[blocks-pg_to_strings-] >> test.py::test[insert_monotonic-break_unique_fail-] [SKIPPED] >> test.py::test[insert_monotonic-several1-default.txt] [SKIPPED] >> test.py::test[join-bush_dis_in_in-off] [SKIPPED] >> test.py::test[join-equi_join_three_simple-off] [SKIPPED] >> test.py::test[join-flatten_columns2-off] [SKIPPED] >> test.py::test[join-inner_all-off] [SKIPPED] >> test.py::test[join-join_comp_common_table-off] [SKIPPED] >> test.py::test[join-join_left_cbo-] [SKIPPED] >> test.py::test[join-lookupjoin_not_selected-off] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys-] >> test_workload.py::TestYdbWorkload::test >> KqpFederatedQuery::ExplainScriptWithGenericAutoDetection [GOOD] >> KqpFederatedQuery::ReadFromDataSourceWithoutTable >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> test.py::test[blocks-mod_uint64_opt2-] [GOOD] >> test.py::test[blocks-sort_two_desc-] >> test.py::test[hor_join-filters-] [GOOD] >> test.py::test[hor_join-fuse_multi_outs1-] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1-outlimit] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_reuse-] [SKIPPED] >> test.py::test[insert-replace_inferred_op-] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys-] >> test.py::test[join-star_join-] [GOOD] >> test.py::test[join-three_equalities-] >> test.py::test[aggregate-group_by_rollup_duo_opt-] [GOOD] >> test.py::test[aggregate-group_by_session_only-] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> test.py::test[join-anyjoin_common_nodata_keys--Results] [GOOD] >> test.py::test[join-bush_dis_in--Results] >> test.py::test[ansi_idents-order_by-default.txt] [GOOD] >> test.py::test[binding-anon_table_binding-default.txt] >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> test.py::test[binding-anon_table_binding-default.txt] [SKIPPED] >> test.py::test[binding-table_regexp_strict_binding-] [SKIPPED] >> 
test.py::test[blocks-block_input_various_types_2-] [SKIPPED] >> test.py::test[blocks-compare-] >> test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter >> test.py::test[blocks-date_less--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less--Results] |98.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_key_check-] [GOOD] >> test.py::test[aggregate-group_by_session-] >> test.py::test[flatten_by-flatten_one_field_another-] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-Results] >> test.py::test[flatten_by-flatten_with_group_by_expr-] >> TopicSessionTests::SlowSession [GOOD] >> test.py::test[aggregate-group_by_expr_only_join--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty--ForceBlocks] >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> test.py::test[join-lookupjoin_semi_subq-] [GOOD] >> test.py::test[count-count_nullable-] [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt] >> test.py::test_counter [GOOD] >> test.py::test_viewer_nodes >> test.py::test[join-mapjoin_on_very_complex_type-off] [SKIPPED] >> test.py::test[hor_join-double_input-default.txt] [GOOD] >> test.py::test[hor_join-out_range-default.txt] >> test.py::test[join-mergejoin_any_no_join_reduce-] >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> KqpFederatedQuery::ReadFromDataSourceWithoutTable [GOOD] >> KqpFederatedQuery::InsertIntoDataSourceWithoutTable >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo [GOOD] >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test[blocks-compare-] [GOOD] >> test.py::test[blocks-add_uint64_opt2-] [GOOD] >> test.py::test[blocks-combine_all_avg-] >> test.py::test[join-commonjoin_unused_keys-] [GOOD] >> test.py::test[join-join_and_distinct_key-off] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test_viewer_tabletinfo >> test.py::test[blocks-pg_to_strings-] [GOOD] >> test.py::test[aggregate-group_by_session_only-] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt] >> test.py::test[blocks-date_add_interval_scalar-] >> test.py::test[join-join_and_distinct_key-off] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary-off] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted-off] >> test.py::test[blocks-string_as_agg_key-] >> test.py::test[join-mergejoin_force_one_sorted-off] [SKIPPED] >> test.py::test[join-mergejoin_unused_keys-] [GOOD] >> test.py::test[join-premap_merge_extrasort2-] >> test.py::test[join-mergejoin_saves_output_sort-] >> test.py::test[join-three_equalities-] [GOOD] >> test.py::test[join-three_equalities_paren-] >> test.py::test[join-premap_merge_extrasort2-] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-off] [SKIPPED] >> test.py::test[join-star_join_with_diff_complex_key-] >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 >> test.py::test[aggregate-group_by_session-] [GOOD] >> test.py::test_viewer_describe [GOOD] >> test.py::test_viewer_cluster [GOOD] >> test.py::test[action-eval_filter-] [SKIPPED] >> 
test.py::test[action-eval_for_over_subquery-default.txt] >> test.py::test[bigdate-table_yt_key_filter-default] [SKIPPED] >> test.py::test[blocks-combine_all_avg_filter_opt-] >> test.py::test_viewer_tenantinfo [GOOD] >> test.py::test_viewer_tenantinfo_db >> KqpFederatedQuery::InsertIntoDataSourceWithoutTable [GOOD] >> KqpFederatedQuery::SpecifyExternalTableInsteadOfExternalDataSource >> test.py::test[aggregate-list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck >> test.py::test[blocks-sort_two_desc-] [GOOD] >> test.py::test[blocks-sub_uint64_opt2-] >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] [GOOD] >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete >> test.py::test[aggregate-group_by_rollup_key_check--Results] >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> test.py::test[flatten_by-flatten_with_group_by_expr-] [GOOD] >> test.py::test[hor_join-row_num_per_sect-] >> test.py::test[join-mergejoin_any_no_join_reduce-] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested-] >> test.py::test[window-win_func_rank_by_opt_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Results] >> test.py::test[blocks-not--ForceBlocks] [GOOD] >> KqpFederatedQuery::SpecifyExternalTableInsteadOfExternalDataSource [GOOD] >> KqpFederatedQuery::QueryWithNoDataInS3 >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[column_group-insert_diff_groups1_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_with_new_cols--Results] >> test.py::test[blocks-not--Results] >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 >> test.py::test[aggregate-group_by_gs_with_rollup--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test[distinct-distinct_groupby-default.txt] [GOOD] >> test.py::test[distinct-distinct_join-default.txt] >> test.py::test[window-current/session_extended--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Results] >> test.py::test[blocks-combine_all_avg-] [GOOD] >> test.py::test[blocks-combine_all_some_filter-] >> test.py::test[blocks-string_as_agg_key-] [GOOD] >> test.py::test[blocks-tuple_nth-] [SKIPPED] >> test.py::test[column_group-hint-single] [SKIPPED] >> test.py::test_pqrb_tablet >> test.py::test[aggregate-group_by_session_distinct-] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct-] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> test.py::test[blocks-date_add_interval_scalar-] [GOOD] >> test.py::test[blocks-member-] >> test_commit.py::TestCommit::test_commit >> test.py::test[column_group-hint_diff_grp_fail3-] 
[SKIPPED] >> test.py::test[column_group-hint_dup_def_fail-] [SKIPPED] >> test.py::test[column_group-publish-perusage] [SKIPPED] >> test.py::test[column_order-insert_with_new_cols-] [SKIPPED] >> test.py::test[column_order-union_all-default.txt] >> test.py::test_pqrb_tablet [GOOD] >> test.py::test_viewer_nodes_issue_14992 [GOOD] >> test.py::test[hor_join-out_range-default.txt] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt] [SKIPPED] >> test.py::test[in-in_enum_single0-default.txt] >> test.py::test_operations_list [GOOD] >> test.py::test_operations_list_page [GOOD] >> test.py::test_operations_list_page_bad [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--ForceBlocks] >> test.py::test[join-three_equalities_paren-] [GOOD] >> test.py::test[aggregate-histogram_cdf-default.txt] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt] >> test.py::test_scheme_directory >> KqpFederatedQuery::QueryWithNoDataInS3 [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLargeStrings >> test.py::test[join-trivial_view-off] [SKIPPED] >> test.py::test[join-yql-19081-] [SKIPPED] >> test.py::test[join-yql-8980-off] [SKIPPED] >> test.py::test[key_filter-calc_dependent-default.txt] >> test.py::test[blocks-sub_uint64_opt2-] [GOOD] >> test.py::test[blocks-top_sort_two_asc-] >> test.py::test[blocks-combine_all_avg_filter_opt-] [GOOD] >> test.py::test[blocks-combine_all_pg-] >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] >> test.py::test_scheme_directory [GOOD] >> test.py::test_topic_data |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |98.5%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> test.py::test[aggregate-list_nullable--ForceBlocks] >> test_commit.py::TestCommit::test_commit [GOOD] >> test_timeout.py::TestTimeout::test_timeout >> test.py::test[blocks-date_greater_scalar--Results] [GOOD] >> test.py::test[blocks-pg_sort--ForceBlocks] >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-] [GOOD] >> KqpQuerySession::NoLocalAttach >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-off] [SKIPPED] >> test.py::test[join-pullup_inner-] >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> test.py::test[join-star_join_with_diff_complex_key-] [GOOD] >> test.py::test[join-yql-12022-] [SKIPPED] >> test.py::test[join-yql-14829_left-] >> test.py::test[action-eval_for_over_subquery-default.txt] [GOOD] >> test.py::test[action-eval_values_output_table_subquery-] >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> test.py::test[join-yql-14829_left-] [SKIPPED] >> test.py::test[join-yql-8125-off] [SKIPPED] >> test.py::test[join-yql_465-] >> test.py::test[action-eval_values_output_table_subquery-] [SKIPPED] >> test.py::test[action-mixed_eval_typeof_world1-] [SKIPPED] >> test.py::test[action-nested_rewrite_io-default.txt] >> test.py::test[hor_join-row_num_per_sect-] [GOOD] >> test.py::test[in-in_compact_distinct-empty] >> test.py::test[join-mergejoin_saves_output_sort-] [GOOD] >> test.py::test[join-premap_common_multiparents-] >> test.py::test[join-premap_common_multiparents-] [SKIPPED] >> test.py::test[join-pullup_cross-off] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-] >> 
test.py::test[window-win_func_rank_by_opt_all--Results] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--ForceBlocks] >> test.py::test[distinct-distinct_join-default.txt] [GOOD] >> test.py::test[dq-precompute_parallel-] [SKIPPED] >> test.py::test[dq-precompute_parallel_indep-] [SKIPPED] >> test.py::test[dq-precompute_tree-default.txt] [SKIPPED] >> test.py::test[expr-as_table_emptylist-] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_column_fail-] [SKIPPED] >> test.py::test[blocks-combine_all_some_filter-] [GOOD] >> test.py::test[file-where_key_in_file_content-] [SKIPPED] >> test.py::test[flatten_by-flatten_member_is_struct-] >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> test.py::test[blocks-combine_hashed_count-] >> test.py::test[in-in_enum_single0-default.txt] [GOOD] >> test.py::test[insert-drop_sortness-calc] |98.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |98.5%| [TM] {RESULT} ydb/tests/example/py3test >> test.py::test[column_order-union_all-default.txt] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables-] [SKIPPED] >> test.py::test[dq-wrong_script-] [SKIPPED] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt] >> test.py::test[solomon-UnknownSetting-] [GOOD] >> test.py::test[insert-drop_sortness-calc] [SKIPPED] >> test.py::test[insert-override-from_sorted] [SKIPPED] >> test.py::test[insert-select_subquery-] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys-off] [SKIPPED] >> test.py::test[join-anyjoin_common_nodup-off] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge-] [SKIPPED] >> test.py::test[blocks-top_sort_two_asc-] [GOOD] >> test.py::test[join-inner_trivial-off] >> test.py::test[column_group-hint_append-] >> test.py::test[key_filter-calc_dependent-default.txt] [GOOD] >> test.py::test[key_filter-contains-default.txt] >> test.py::test[join-inner_trivial-off] [SKIPPED] >> test.py::test[join-join_right_cbo-] [SKIPPED] >> test.py::test[join-join_without_column-off] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-] >> test.py::test_topic_data [GOOD] >> test.py::test_transfer_describe >> test.py::test[column_group-hint_append-] [SKIPPED] >> test.py::test[column_group-length-single] >> test.py::test_transfer_describe [GOOD] |98.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[blocks-combine_all_pg-] [GOOD] >> test.py::test[blocks-combine_hashed_avg-] >> test.py::test[aggregate-group_by_session_only_distinct-] [GOOD] >> test.py::test[aggregate-subquery_aggregation-] >> test.py::test[blocks-member-] [GOOD] >> test.py::test[column_group-hint_unk_col_fail-] [SKIPPED] >> test.py::test[column_order-select_action-default.txt] >> test.py::test[blocks-date_less--Results] [GOOD] >> test.py::test[blocks-div_uint64_opt2--ForceBlocks] >> test.py::test[aggregate-percentile_interval-default.txt] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-wo_compat] [SKIPPED] >> test.py::test[binding-table_from_binding-default.txt] >> KqpTpch::Query22 [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt] [GOOD] >> test.py::test[aggr_factory-every-default.txt] [SKIPPED] >> test.py::test[aggr_factory-multi-] |98.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |98.6%| [TM] 
{RESULT} ydb/library/yql/tests/sql/solomon/pytest |98.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/py3test >> test.py::test_transfer_describe [GOOD] |98.6%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> test.py::test[join-star_join_inners_vk_sorted-] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off] [SKIPPED] >> test.py::test[join-yql-14847-off] [SKIPPED] >> test.py::test[key_filter-convert-] >> test.py::test[action-action_eval_cluster_table_for-] [SKIPPED] >> test.py::test[action-eval_anon_table-] [SKIPPED] >> test.py::test[action-insert_after_eval_xlock-] [SKIPPED] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_list-] >> test.py::test[aggr_factory-avg-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_distinct_expr_with_udf-] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] [GOOD] >> test.py::test[aggregate-group_by_hop--ForceBlocks] >> test.py::test[join-lookupjoin_inner_2o-] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-] >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test.py::test[blocks-combine_hashed_avg-] [GOOD] >> test.py::test[blocks-combine_hashed_sum_many_keys-] >> test.py::test[binding-table_from_binding-default.txt] [GOOD] >> test.py::test[in-in_compact_distinct-empty] [GOOD] >> test.py::test[blocks-add_int16-] >> test.py::test[join-pullup_inner-] [GOOD] >> test.py::test[join-split_to_list_as_key-] >> test.py::test[join-yql_465-] [GOOD] >> test.py::test[key_filter-yql-19420-] [SKIPPED] >> test.py::test[in-in_sorted_by_tuple-] >> test.py::test[aggregate-group_by_gs_few_empty--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_few_empty--Results] >> test.py::test[flatten_by-flatten_member_is_struct-] [GOOD] >> test.py::test[blocks-pg_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> test.py::test[lineage-nested_lambda_fields-default.txt] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force-] [SKIPPED] >> test.py::test[optimizers-nonselected_direct_row-] >> test.py::test[solomon-BadDownsamplingAggregation-] >> test.py::test[hor_join-fuse_multi_outs2-] [SKIPPED] >> test.py::test[hor_join-less_outs-] [SKIPPED] >> test.py::test[hor_join-out_hor_join-default.txt] >> test.py::test[optimizers-nonselected_direct_row-] [SKIPPED] >> test.py::test[optimizers-yql-7324_duplicate_arg-] >> test.py::test[key_filter-contains-default.txt] [GOOD] >> test.py::test[key_filter-is_null_multi_key-] >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> test.py::test[aggregate-list_nullable--ForceBlocks] [GOOD] >> test.py::test[aggregate-list_nullable--Results] >> test.py::test[aggregate-subquery_aggregation-] [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> TopicSessionTests::RestartSessionIfQueryStopped >> test.py::test[blocks-block_output_various_types-] [SKIPPED] >> test.py::test[blocks-combine_all_avg_filter-] >> test.py::test[blocks-combine_hashed_count-] [GOOD] >> test.py::test[blocks-not_opt-] >> test.py::test[column_order-select_action-default.txt] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] >> test.py::test[column_order-union_all_positional_columns_count_fail-] [SKIPPED] >> test.py::test[count-count_all-default.txt] >> test.py::test[column_group-length-single] [GOOD] >> 
test.py::test[column_group-many_inserts-] [SKIPPED] >> test.py::test[column_group-respull-] [SKIPPED] >> test.py::test[count-count-] >> test.py::test[join-lookupjoin_inner_empty_subq-] [GOOD] >> test.py::test[join-mapjoin_dup_key-off] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off] [SKIPPED] >> test.py::test[join-mergejoin_left_null_column-off] [SKIPPED] >> test.py::test[join-premap_common_cross-off] [SKIPPED] >> test.py::test[join-premap_common_inner_filter-] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-off] ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 >> test.py::test[join-premap_common_right_tablecontent-off] [SKIPPED] >> test.py::test[join-premap_context_dep-off] [SKIPPED] >> test.py::test[join-premap_map_semi-off] [SKIPPED] >> test.py::test[join-pullup_cross-] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] >> test.py::test[solomon-BadDownsamplingDisabled-] |98.6%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> test.py::test[join-split_to_list_as_key-] [GOOD] >> test.py::test[join-star_join_inners-off] [SKIPPED] >> test.py::test[join-yql-8125-] [SKIPPED] >> test.py::test[key_filter-dict_contains_optional-] >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> test.py::test[optimizers-yql-7324_duplicate_arg-] [GOOD] >> test.py::test[order_by-assume_cut_prefix-] >> test.py::test[in-in_sorted_by_tuple-] [GOOD] >> test.py::test[in-in_tablesource_to_equijoin-] >> test.py::test[order_by-assume_cut_prefix-] [SKIPPED] >> test.py::test[order_by-literal_with_assume-] [SKIPPED] >> test.py::test[order_by-order_by_expr_with_deps-default.txt] >> test.py::test[flatten_by-flatten_list-] [GOOD] >> test.py::test[aggregate-list_nullable--Results] [GOOD] >> test.py::test[aggregate-percentiles_containers--ForceBlocks] >> test.py::test[blocks-combine_all_avg_filter-] [GOOD] >> test.py::test[key_filter-is_null_multi_key-] [GOOD] >> test.py::test[key_filter-no_bypass_merge-] [SKIPPED] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt] >> test.py::test[flatten_by-struct_without_correlation-default.txt] >> test.py::test[blocks-add_int16-] [GOOD] >> test.py::test[blocks-add_int64-] >> test.py::test[hor_join-out_hor_join-default.txt] [GOOD] >> test.py::test[hor_join-yield_on-default.txt] >> test.py::test[blocks-combine_all_max-] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] >> 
test.py::test[hor_join-yield_on-default.txt] [SKIPPED] >> test.py::test[in-basic_in-default.txt] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore >> test.py::test[blocks-div_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] >> test.py::test[aggregate-group_by_rollup_key_check--Results] [GOOD] >> test.py::test[aggregate-group_by_session_nopush--Results] [SKIPPED] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] >> test.py::test[aggr_factory-multi-] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt] [SKIPPED] >> test.py::test[aggregate-aggregate_key_column-default.txt] >> test.py::test[key_filter-convert-] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter-] |98.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] |98.6%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> test.py::test[aggregate-aggregate_distinct_expr_with_udf-] [GOOD] >> test.py::test[aggregate-group_by_expr-] >> test.py::test[blocks-combine_hashed_sum_many_keys-] [GOOD] >> test.py::test[blocks-decimal_comparison-] [SKIPPED] >> test.py::test[blocks-interval_add_date_scalar-] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] >> test.py::test[blocks-not_opt-] [GOOD] >> test.py::test[blocks-pg_sort-] >> test.py::test[join-bush_dis_in--Results] [GOOD] >> test.py::test[join-bush_dis_in_in--Results] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> test.py::test[join-pullup_cross-] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-] |98.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[count-count_all-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt] >> test.py::test[in-basic_in-default.txt] [GOOD] >> test.py::test[insert-double_append_to_anonymous-] >> test.py::test[blocks-add_int64-] [GOOD] >> test.py::test[blocks-block_input-] [SKIPPED] >> test.py::test[blocks-coalesce_bools-] >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopic2 >> test.py::test[insert-double_append_to_anonymous-] [SKIPPED] >> test.py::test[insert-drop_sortness-desc] [SKIPPED] >> test.py::test[insert-insert_null-default.txt] [SKIPPED] >> test.py::test[insert-override-from_sorted_calc] [SKIPPED] >> test.py::test[insert-override-with_view] [SKIPPED] >> test.py::test[insert-override_view_fail-] [SKIPPED] >> test_log_scenario.py::TestLogScenario::test[180] >> test.py::test[aggregate-aggregate_key_column-default.txt] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda-] >> test.py::test[join-anyjoin_common_dup-] >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> test.py::test[in-in_tablesource_to_equijoin-] [GOOD] >> test.py::test[insert-anonymous_tables-default.txt] [SKIPPED] >> test.py::test[insert-insert_from_other-] [SKIPPED] >> test.py::test[insert-override-from_sorted_desc] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt] [GOOD] >> test.py::test[key_filter-range_union-] >> test.py::test[insert-override-from_sorted_desc] [SKIPPED] >> 
test.py::test[insert-select_with_sort_limit-default.txt] >> test.py::test[count-count-] [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration >> test.py::test[insert-select_with_sort_limit-default.txt] [SKIPPED] >> test.py::test[insert-trivial_select-default.txt] [SKIPPED] >> test.py::test[insert-use_anon_table_before_commit_fail-] [SKIPPED] >> test.py::test[insert_monotonic-overlaping_fail-] [SKIPPED] >> test.py::test[join-from_in_front_join-off] >> test.py::test[key_filter-lambda_with_null_filter-] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt] >> test.py::test[aggregate-group_by_gs_few_empty--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--ForceBlocks] >> test.py::test[flatten_by-struct_without_correlation-default.txt] [GOOD] >> test.py::test[hor_join-merge_multiouts_part-] [SKIPPED] >> test.py::test[hor_join-sorted_out-] >> test.py::test[expr-non_persistable_inner_select_fail-] [SKIPPED] >> test.py::test[flatten_by-flatten_dict_by_opt-] >> test.py::test[aggregate-group_by_expr-] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr-] >> test.py::test[join-from_in_front_join-off] [SKIPPED] >> test.py::test[join-inner_all_right-off] [SKIPPED] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> test.py::test[blocks-interval_add_date_scalar-] [GOOD] >> test.py::test[join-join_cbo_3_tables-] [SKIPPED] >> test.py::test[join-join_without_correlation_and_struct_access-] >> test.py::test[aggregate-group_by_hop--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> test.py::test[blocks-tuple_type-] >> test.py::test[key_filter-dict_contains_optional-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[blocks-combine_all_max-] [GOOD] >> test.py::test[blocks-combine_hashed_min-] >> test.py::test[key_filter-split_input_with_key_filter1-] [SKIPPED] >> test.py::test[key_filter-yql-14157-] [SKIPPED] >> test.py::test[limit-zero_limit-default.txt] >> test.py::test[solomon-BadDownsamplingInterval-] >> TDqPqRdReadActorTests::SessionError >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> TDqPqRdReadActorTests::SessionError [GOOD] >> test_query_cache.py::TestQueryCache::test >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] [GOOD] >> test.py::test[window-win_multiaggr_library--ForceBlocks] >> TDqPqRdReadActorTests::ReadWithFreeSpace >> test.py::test[window-full/noncompact_with_nulls--Results] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey-] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test.py::test[blocks-pg_sort-] [GOOD] >> test.py::test[blocks-string_pass-] >> test.py::test[window-generic/aggregations_after_current--Results] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt] [GOOD] >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> test.py::test[flatten_by-flatten_two_fields-] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> test.py::test[join-anyjoin_common_dup-] [GOOD] >> test.py::test[join-bush_in_in-off] >> test.py::test[aggregate-aggregate_with_lambda-] [GOOD] >> test.py::test[aggregate-error_type-] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] [GOOD] >> 
test.py::test[window-win_func_rank_by_part--ForceBlocks] >> test.py::test[join-bush_in_in-off] [SKIPPED] >> test.py::test[join-bush_in_in_in-off] [SKIPPED] >> test.py::test[join-equi_join_by_expr-off] >> test.py::test[aggregate-error_type-] [SKIPPED] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt] >> test.py::test[blocks-coalesce_bools-] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt] [SKIPPED] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[blocks-date_greater-] >> test.py::test[solomon-Basic-default.txt] >> test.py::test[join-equi_join_by_expr-off] [SKIPPED] >> test.py::test[join-join_without_correlation_and_dict_access-] >> test.py::test[key_filter-yql_5895_or-default.txt] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt] >> test.py::test[aggregate-group_by_rollup_aggr_expr-] [GOOD] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt] >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint |98.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> test.py::test[hor_join-sorted_out-] [GOOD] >> test.py::test[in-in_compact_distinct-] >> test.py::test[blocks-combine_hashed_min-] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename-] [GOOD] >> test.py::test[blocks-combine_hashed_set-] |98.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> test.py::test[join-star_join_inners_premap-] [SKIPPED] >> test.py::test[join-trivial_view-] >> test.py::test[flatten_by-flatten_dict_by_opt-] [GOOD] >> test.py::test[flatten_by-flatten_expr_join-] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged >> test.py::test[blocks-string_pass-] [GOOD] >> test.py::test[blocks-string_with-] >> test.py::test[action-action_eval_cluster_and_table-default.txt] >> test.py::test[action-action_eval_cluster_and_table-default.txt] [SKIPPED] >> test.py::test[action-subquery_merge2-default.txt] [SKIPPED] >> test.py::test[action-subquery_merge_evaluate-default.txt] [SKIPPED] >> test.py::test[action-subquery_merge_nested_subquery-] [SKIPPED] >> test.py::test[flatten_by-flatten_two_fields-] [GOOD] >> test.py::test[blocks-tuple_type-] [GOOD] >> test.py::test[blocks-type_and_callable_stats-] [SKIPPED] >> test.py::test[column_group-hint-perusage] [SKIPPED] >> test.py::test[agg_apply-avg_const_interval-] >> test.py::test[hor_join-yql-6477_table_path-default.txt] [SKIPPED] >> test.py::test[in-huge_in-default.txt] >> test.py::test[column_order-select_plain_nosimple-default.txt] >> test.py::test[agg_apply-avg_const_interval-] [SKIPPED] >> test.py::test[aggr_factory-corellation-default.txt] [SKIPPED] >> test.py::test[aggr_factory-stddev-default.txt] [SKIPPED] >> test.py::test[aggr_factory-top-default.txt] >> test.py::test[aggr_factory-top-default.txt] [SKIPPED] >> test.py::test[aggr_factory-udaf-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_gs_and_having-default.txt] >> test.py::test[order_by-order_by_num_key_and_subkey-] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> test.py::test[join-left_join_null_column-] >> 
test.py::test[key_filter-range_union-] [GOOD] >> test.py::test[limit-zero_limit-default.txt] [GOOD] >> test.py::test[lineage-isolated-default.txt] [SKIPPED] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt] [SKIPPED] >> test.py::test[order_by-sort_with_take-] >> test.py::test[order_by-order_by_tablerecord_column-] >> test.py::test[order_by-sort_with_take-] [SKIPPED] >> test.py::test[lineage-reduce_all-default.txt] [SKIPPED] >> test.py::test[lineage-select_all_filter-default.txt] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt] >> test.py::test[join-join_without_correlation_and_dict_access-] [GOOD] >> test.py::test[join-left_semi_with_other-off] [SKIPPED] >> test.py::test[limit-empty_input_after_limit-default.txt] [GOOD] >> test.py::test[lineage-member_over_if_struct-default.txt] >> test.py::test[pg-insert-] [SKIPPED] >> test.py::test[pg-select_from_columns_qstar-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_gs_join_aliases-default.txt] [GOOD] >> test.py::test[lineage-union_all_tablerow-default.txt] [SKIPPED] >> test.py::test[multicluster-basic-default.txt] [SKIPPED] >> test.py::test[optimizers-simplified_path_constraint-] [SKIPPED] >> test.py::test[optimizers-unused_columns_window-] >> test.py::test[join-left_trivial-] >> test.py::test[lineage-member_over_if_struct-default.txt] [SKIPPED] >> test.py::test[lineage-reduce-default.txt] [SKIPPED] >> test.py::test[lineage-select_union_all-default.txt] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt] [SKIPPED] >> test.py::test[pg-select_subquery2_qstar-default.txt] [SKIPPED] >> test.py::test[pg-select_unionall_self-default.txt] [SKIPPED] >> test.py::test[aggregate-group_by_session_compact-] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill-] [SKIPPED] >> test.py::test[order_by-literal_with_assume_desc-] >> test.py::test[pg-tpcds-q08-default.txt] [SKIPPED] >> test.py::test[blocks-combine_hashed_set-] [GOOD] >> test.py::test[blocks-date_greater_or_equal-] >> test.py::test[order_by-literal_with_assume_desc-] [SKIPPED] >> test.py::test[order_by-order_by_expr-] >> test.py::test[pg-tpcds-q31-default.txt] >> test.py::test[aggregate-group_by_ru_partition_by_grouping-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_extended-] >> test.py::test[flatten_by-flatten_expr_join-] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-] [SKIPPED] >> test.py::test[in-in_compact_distinct-] [GOOD] >> test.py::test[insert-after_group_by-default.txt] >> test.py::test[join-trivial_view-] [GOOD] >> test.py::test[join-yql-8131-] >> test.py::test[hor_join-max_in_tables-] >> test.py::test[pg-tpcds-q31-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q54-default.txt] >> test.py::test[insert-after_group_by-default.txt] [SKIPPED] >> test.py::test[insert-fail_read_view_after_modify-] [SKIPPED] >> test.py::test[insert-from_erasure_to_none-] [SKIPPED] >> test.py::test[insert-keepmeta_proto_fail-] [SKIPPED] >> test.py::test[insert-literals_to_string-default.txt] [SKIPPED] >> test.py::test[insert_monotonic-keep_meta-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q54-default.txt] [SKIPPED] >> test_query_cache.py::TestQueryCache::test [GOOD] >> test.py::test[join-alias_where_group-] >> test.py::test[pg-tpcds-q62-default.txt] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q09-default.txt] >> 
test.py::test[blocks-string_with-] [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> test.py::test[blocks-date_greater-] [GOOD] >> test.py::test[blocks-date_not_equals-] >> test.py::test[case-case_size_eq_cast-default.txt] >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test.py::test[pg-tpch-q09-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q11-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q19-default.txt] >> TDqPqRdReadActorTests::Backpressure >> test.py::test[pg-tpch-q19-default.txt] [SKIPPED] >> test.py::test[produce-process_multi_in_single_out-] >> test.py::test[produce-process_multi_in_single_out-] [SKIPPED] >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> RowDispatcherTests::OneClientOneSession >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt] >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession [GOOD] >> RowDispatcherTests::SessionError >> test.py::test[join-left_join_null_column-] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt] >> RowDispatcherTests::SessionError [GOOD] >> test.py::test[join-left_trivial-] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt] [SKIPPED] >> test.py::test[produce-reduce_by_struct-default.txt] >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> test.py::test[join-left_join_right_pushdown_optional-] [SKIPPED] >> test.py::test[join-left_trivial-off] [SKIPPED] >> test.py::test[join-lookupjoin_with_cache-off] [SKIPPED] >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> test.py::test[join-lookupjoin_bug8533-off] [SKIPPED] >> test.py::test[aggregate-group_by_session_compact-] [GOOD] >> test.py::test[produce-reduce_by_struct-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in-] >> test.py::test[aggregate-group_by_gs_and_having-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_simp-] >> test.py::test[join-mergejoin_force_align1-off] [SKIPPED] >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> test.py::test[join-mergejoin_any_no_join_reduce-off] >> test.py::test[action-discard-default.txt] >> test.py::test[produce-reduce_multi_in-] [SKIPPED] >> test.py::test[produce-reduce_typeinfo-] >> test.py::test[column_order-select_plain_nosimple-default.txt] [GOOD] >> test.py::test[join-mergejoin_left_null_column-] >> test.py::test[aggregate-group_compact_sorted_distinct_complex-] >> test.py::test[action-discard-default.txt] [SKIPPED] >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> test.py::test[join-mergejoin_any_no_join_reduce-off] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted-] >> test.py::test[optimizers-unused_columns_window-] [GOOD] >> test.py::test[produce-reduce_typeinfo-] [SKIPPED] >> test.py::test[ql_filter-integer_single_disable_prune-] [SKIPPED] >> test.py::test[distinct-distinct_by_tuple-default.txt] >> test.py::test[order_by-order_by_tablerecord_column-] [GOOD] >> test.py::test[order_by-sort_simple-] >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> test.py::test[hor_join-max_in_tables-] [GOOD] >> test.py::test[in-in_ansi_join-] >> test.py::test[action-eval_input_output_table_subquery-] >> 
test.py::test[blocks-date_greater_or_equal-] [GOOD] >> test.py::test[optimizers-yql-15210_sqlin-] >> test.py::test[in-huge_in-default.txt] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt] [SKIPPED] >> test.py::test[sampling-read-dynamic] >> test.py::test[in-in_tuple_table-default.txt] >> test.py::test[aggregate-group_by_session_extended-] [GOOD] >> test.py::test[aggregate-group_by_tz_date-] >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> test.py::test[action-eval_input_output_table_subquery-] [SKIPPED] >> test.py::test[action-evaluate_match_type-default.txt] [SKIPPED] >> test.py::test[in-in_tuple_table-default.txt] [SKIPPED] >> test.py::test[sampling-read-dynamic] [SKIPPED] >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> test.py::test[blocks-date_top_sort-] [SKIPPED] >> test.py::test[sampling-reduce_with_presort-] [SKIPPED] >> test.py::test[join-yql-8131-] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt] >> test.py::test[schema-limit_simple-] [SKIPPED] >> test.py::test[blocks-distinct_opt_state_all-] >> test.py::test[action-evaluate_pure-] >> test.py::test[action-eval_if-default.txt] >> test.py::test[in-in_with_list_dict-default.txt] >> test.py::test[schema-select_all-row_spec_diff_sort_desc] >> test.py::test[blocks-date_not_equals-] [GOOD] >> test.py::test[blocks-decimal_avg-] [SKIPPED] >> test.py::test[blocks-decimal_multiplicative_ops-] [SKIPPED] >> test.py::test[blocks-decimal_op_decimal_scalar-] >> test.py::test[blocks-decimal_op_decimal_scalar-] [SKIPPED] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> test.py::test[blocks-distinct_mixed_keys-] >> test.py::test[join-alias_where_group-] [GOOD] >> test.py::test[case-case_size_eq_cast-default.txt] [GOOD] >> test.py::test[coalesce-coalesce_few_opt-] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> test.py::test[join-anyjoin_common_nodup-] >> test.py::test[join-mergejoin_left_null_column-] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-] >> test.py::test[order_by-order_by_expr-] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_by_tuple-default.txt] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted-] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q15-default.txt] >> test.py::test[aggregate-group_by_tz_date-] [GOOD] >> test.py::test[bigdate-table_yt_native-on] [SKIPPED] >> test.py::test[binding-table_concat_binding-default.txt] >> test.py::test[aggregate-group_by_gs_simp-] [GOOD] >> test.py::test[aggregate-group_by_hop_bad_interval-] >> test.py::test[optimizers-yql-15210_sqlin-] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt] [SKIPPED] >> test.py::test[order_by-SortByTwoFieldsDesc-] >> test.py::test[pg-tpcds-q33-default.txt] >> test.py::test[distinct-distinct_count_only-default.txt] >> test.py::test[join-mergejoin_saves_output_sort_unmatched-] [SKIPPED] >> test.py::test[aggregate-group_by_hop_bad_interval-] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner-] >> test.py::test[binding-table_concat_binding-default.txt] [SKIPPED] >> test.py::test[blocks-add_decimal-] >> test.py::test[aggregate-group_compact_sorted_distinct_complex-] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt] >> test.py::test[order_by-sort_simple-] [GOOD] >> 
test.py::test[aggregate-group_by_hop_expr_key-] >> test.py::test[pg-tpcds-q32-default.txt] >> test.py::test[pg-tpcds-q33-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q41-default.txt] >> test.py::test[blocks-add_decimal-] [SKIPPED] >> test.py::test[blocks-add_uint32-] >> test.py::test[aggregate-group_by_hop_expr_key-] [SKIPPED] >> test.py::test[binding-table_range_strict_binding-default.txt] [SKIPPED] >> test.py::test[blocks-add_int32-] >> test.py::test[schema-select_all-row_spec_diff_sort_desc] [GOOD] >> test.py::test[aggregate-group_by_ru_join-] >> test.py::test[action-eval_if-default.txt] [GOOD] >> test.py::test[action-evaluate_queries-] >> test.py::test[window-win_multiaggr_library--ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] >> test.py::test[schema-select_all_inferschema_op_custom_tmp-] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt] [GOOD] >> test.py::test[limit-limit_skip_take-default.txt] >> test.py::test[pg-tpcds-q32-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q48-default.txt] >> test.py::test[pg-tpcds-q48-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q69-default.txt] >> test.py::test[pg-tpcds-q41-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q60-default.txt] >> test.py::test[pg-tpcds-q69-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q71-default.txt] >> test.py::test[aggregate-percentiles_containers--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_containers--Results] >> test.py::test[pg-tpcds-q60-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q93-default.txt] >> test.py::test[action-evaluate_pure-] [GOOD] >> test.py::test[pg-tpcds-q71-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q95-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q93-default.txt] [SKIPPED] >> test.py::test[aggr_factory-count-default.txt] [SKIPPED] >> test.py::test[in-in_with_list_dict-default.txt] [GOOD] >> test.py::test[insert-two_input_tables-] [SKIPPED] >> test.py::test[insert_monotonic-truncate_fail-] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys-] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt] >> test.py::test[produce-process_with_python_stream-] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail] [SKIPPED] >> test.py::test[produce-reduce_with_assume-] [SKIPPED] >> test.py::test[produce-reduce_with_presort_diff_order-] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-05-05T10:02:09.105770Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 2025-05-05T10:02:09.105979Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:25:2054] 2025-05-05T10:02:09.105988Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-05-05T10:02:09.105994Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-05-05T10:02:09.105998Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-05-05T10:02:09.106003Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-05-05T10:02:09.106007Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-05-05T10:02:09.106023Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-05-05T10:02:09.106044Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to 
[1:28:2055] 2025-05-05T10:02:09.106061Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-05-05T10:02:09.106068Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-05-05T10:02:09.106625Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-05-05T10:02:09.106639Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-05-05T10:02:09.106656Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:31:2055] 2025-05-05T10:02:09.106683Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-05-05T10:02:09.106688Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:31:2055] 2025-05-05T10:02:09.106698Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-05-05T10:02:09.106702Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-05-05T10:02:09.106706Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-05-05T10:02:09.106713Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-05-05T10:02:09.106717Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-05-05T10:02:09.106725Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-05-05T10:02:09.106730Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-05-05T10:02:09.128178Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-05-05T10:02:09.128221Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-05-05T10:02:09.128225Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-05-05T10:02:09.128229Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-05-05T10:02:09.128231Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-05-05T10:02:09.128234Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-05-05T10:02:09.128236Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-05-05T10:02:09.128249Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-05-05T10:02:09.128268Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-05-05T10:02:09.128280Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-05-05T10:02:09.128286Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-05-05T10:02:09.141453Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-05-05T10:02:09.141489Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-05-05T10:02:09.141496Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-05-05T10:02:09.141504Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-05-05T10:02:09.141509Z node 9 
:FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.141513Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.141576Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-05-05T10:02:09.141580Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.141583Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.141590Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-05-05T10:02:09.141597Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.141600Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.155946Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T10:02:09.156002Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T10:02:09.156028Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.156033Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.164319Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T10:02:09.164397Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.164403Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.167483Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T10:02:09.167543Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.167549Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.171944Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.171986Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T10:02:09.172009Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Coordination node successfully created 2025-05-05T10:02:09.172015Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Start session 2025-05-05T10:02:09.172267Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.172276Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.175214Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.175245Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": 2025-05-05T10:02:09.175261Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-05-05T10:02:09.175273Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:339 } 2025-05-05T10:02:09.175284Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Coordination node successfully created 2025-05-05T10:02:09.175289Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Start session 2025-05-05T10:02:09.175463Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Coordination node successfully created 2025-05-05T10:02:09.175471Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Start session 2025-05-05T10:02:09.176127Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Session successfully created 2025-05-05T10:02:09.177402Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Session successfully created 2025-05-05T10:02:09.177714Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Session successfully created 2025-05-05T10:02:09.177785Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Semaphore successfully created 2025-05-05T10:02:09.177793Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Try to acquire semaphore 2025-05-05T10:02:09.177858Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Describe semaphore 2025-05-05T10:02:09.179219Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Semaphore successfully acquired 2025-05-05T10:02:09.179234Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Semaphore successfully created 2025-05-05T10:02:09.179238Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Try to acquire semaphore 2025-05-05T10:02:09.179293Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Describe semaphore 2025-05-05T10:02:09.179403Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Semaphore successfully described: coordinator id [9:6:2053] generation 1 2025-05-05T10:02:09.179409Z node 9 :FQ_ROW_DISPATCHER INFO: TLeaderElection [9:9:2056] Send TEvCoordinatorChanged to [9:4:2051] 2025-05-05T10:02:09.181007Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Semaphore successfully created 2025-05-05T10:02:09.181020Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Try to acquire semaphore 2025-05-05T10:02:09.181088Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Descri ... 
ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:14.630612Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-05-05T10:03:14.630678Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-05-05T10:03:14.630702Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T10:03:14.630722Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-05-05T10:03:14.630755Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-05-05T10:03:14.630781Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-05-05T10:03:14.630792Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-05-05T10:03:14.630804Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-05-05T10:03:14.630813Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-05-05T10:03:14.630819Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-05-05T10:03:14.630825Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-05-05T10:03:14.630831Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-05-05T10:03:14.630837Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-05-05T10:03:14.630843Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-05-05T10:03:14.630849Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-05-05T10:03:14.630854Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-05-05T10:03:14.630860Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-05-05T10:03:14.630869Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-05-05T10:03:14.630875Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id QueryId 2025-05-05T10:03:14.630889Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason ActorUnknown 2025-05-05T10:03:14.630893Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-05-05T10:03:14.630901Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-05-05T10:03:14.630907Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:23:2064] 2025-05-05T10:03:14.724805Z node 40 :FQ_ROW_DISPATCHER DEBUG: 
RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-05-05T10:03:14.727186Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-05-05T10:03:14.727223Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-05-05T10:03:14.727237Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-05-05T10:03:14.727242Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:14.727246Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:14.727523Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-05-05T10:03:14.727578Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-05-05T10:03:14.727609Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T10:03:14.727654Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-05-05T10:03:14.727668Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-05-05T10:03:14.727687Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-05-05T10:03:14.727699Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-05-05T10:03:14.727711Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-05-05T10:03:14.727720Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-05-05T10:03:14.727725Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-05-05T10:03:14.727730Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id QueryId 2025-05-05T10:03:14.727739Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-05-05T10:03:14.727743Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-05-05T10:03:14.727749Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-05-05T10:03:14.727760Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-05-05T10:03:14.727763Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-05-05T10:03:14.727767Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:23:2064] 2025-05-05T10:03:14.955664Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:17:2058], tenant Tenant 2025-05-05T10:03:14.957634Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully 
bootstrapped coordinator, id [42:18:2059] 2025-05-05T10:03:14.957663Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-05-05T10:03:14.957677Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-05-05T10:03:14.957682Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:14.957686Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:14.957869Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-05-05T10:03:14.957914Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-05-05T10:03:14.957939Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T10:03:14.966868Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-05-05T10:03:14.966960Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-05-05T10:03:14.967011Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-05-05T10:03:14.967264Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T10:03:14.967334Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-05-05T10:03:14.967350Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T10:03:14.967374Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-05-05T10:03:14.967380Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T10:03:14.967398Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-05-05T10:03:14.967404Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-05-05T10:03:14.967420Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-05-05T10:03:14.967425Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-05-05T10:03:14.967437Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-05-05T10:03:15.035518Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-05-05T10:03:15.037800Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-05-05T10:03:15.037832Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-05-05T10:03:15.037850Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-05-05T10:03:15.037855Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:15.037863Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-05-05T10:03:15.037893Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-05-05T10:03:15.037939Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-05-05T10:03:15.037963Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-05-05T10:03:15.038027Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-05-05T10:03:15.038037Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-05-05T10:03:15.038042Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-05-05T10:03:15.038050Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[44:22:2063] >> test.py::test[pg-tpch-q14-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_presort-] [SKIPPED] >> test.py::test[join-anyjoin_common_nodata_keys-] [SKIPPED] >> test.py::test[join-filter_joined-] >> test.py::test[produce-reduce_with_python_few_keys-] [SKIPPED] >> test.py::test[sampling-orderedjoin_right_sample-default.txt] >> test.py::test[produce-reduce_with_python_filter_and_having-] [SKIPPED] >> test.py::test[ql_filter-integer_bounds-] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> test.py::test[sampling-orderedjoin_right_sample-default.txt] [SKIPPED] >> test.py::test[coalesce-coalesce_few_opt-] [GOOD] |98.7%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest >> test.py::test[join-anyjoin_common_nodup-] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff-] [SKIPPED] >> test.py::test[schema-def_values-] >> test.py::test[blocks-distinct_mixed_keys-] [GOOD] >> test.py::test[blocks-finalize_hashed_keys-] >> test.py::test[sampling-read-] [SKIPPED] >> test.py::test[schema-copy-read_schema] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5-] [SKIPPED] >> test.py::test[join-bush_in_in-] >> test.py::test[aggregate-group_by_gs_simp--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_simp--Results] >> test.py::test[schema-patchtype-] >> test.py::test[column_order-insert_tmp-default.txt] [SKIPPED] >> test.py::test[in-in_ansi_join-] [GOOD] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt] >> test.py::test[in-in_immediate_subquery-default.txt] >> test.py::test[column_order-insert_with_desc_sort_and_native_types-default.txt] [SKIPPED] >> test.py::test[blocks-add_int32-] [GOOD] >> test.py::test[column_order-winfunc-default.txt] >> test.py::test[blocks-distinct_opt_state_all-] [GOOD] >> test.py::test[blocks-pg_to_numbers-] >> test.py::test[distinct-distinct_count_only-default.txt] [GOOD] >> test.py::test[dq-blacklisted_pragmas1-] [SKIPPED] >> test.py::test[blocks-block_input_mapreduce-] [SKIPPED] >> test.py::test[blocks-block_input_various_types-] [SKIPPED] >> test.py::test[window-win_func_rank_by_part--ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Results] >> test.py::test[dq-truncate_local-default.txt] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append-] [SKIPPED] >> test.py::test[flatten_by-flatten_with_subquery-default.txt] [SKIPPED] >> test.py::test[window-win_func_rank_by_part--Results] >> test.py::test[blocks-coalesce_ints-] >> test.py::test[action-evaluate_queries-] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt] >> test.py::test[schema-select_all_inferschema_op_custom_tmp-] [GOOD] >> test.py::test[schema-select_fields_inferschema-] >> test.py::test[hor_join-out_max_outtables-default.txt] >> test.py::test[aggregate-group_by_ru_join-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt] >> test.py::test[action-subquery_merge_nested_world-default.txt] [SKIPPED] >> test.py::test[aggregate-GroupByTwoFields-] >> test.py::test[join-mergejoin_with_reverse_key_order-] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off] >> test.py::test[order_by-SortByTwoFieldsDesc-] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off] [SKIPPED] >> test.py::test[join-pullup_extra_columns-] [SKIPPED] >> test.py::test[join-simple_columns_partial-] >> test.py::test[order_by-order_by_expr_mul_cols-] >> 
test.py::test[limit-limit_skip_take-default.txt] [GOOD] >> test.py::test[blocks-add_uint32-] [GOOD] >> test.py::test[blocks-add_uint64_opt-] >> test.py::test[lineage-error_type-] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt] >> test.py::test[lineage-with_inline-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-7767_key_filter_with_view-] [SKIPPED] >> test.py::test[order_by-limit-] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner-] [GOOD] >> test.py::test[join-premap_merge_extrasort1-] [SKIPPED] >> test.py::test[join-filter_joined-] [GOOD] >> test.py::test[join-from_in_front_join-] >> test.py::test[join-premap_merge_extrasort2-off] [SKIPPED] >> test.py::test[join-premap_no_premap-] >> test.py::test[schema-def_values-] [GOOD] >> test.py::test[join-premap_no_premap-] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap-] >> test.py::test[schema-insert_sorted-read_schema] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] >> test.py::test[join-premap_nonseq_flatmap-] [SKIPPED] >> test.py::test[join-pullup_exclusion-] >> test.py::test[schema-insert_sorted-read_schema] [SKIPPED] >> test.py::test[schema-user_schema_mix2-] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt] [GOOD] >> test.py::test[insert-append_proto_fail-] [SKIPPED] >> test.py::test[schema-patchtype-] [GOOD] >> test.py::test[join-bush_dis_in_in--Results] [GOOD] >> test.py::test[solomon-InvalidProject-] >> test.py::test[schema-read_schema_change_other-] [SKIPPED] >> test.py::test[insert-from_two_sorted_by_calc-default.txt] [SKIPPED] >> test.py::test[insert-override-proto] >> test.py::test[join-bush_in-off-Results] [SKIPPED] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] >> test.py::test[aggregate-aggregate_with_const_yson_options-default.txt] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test.py::test[schema-remap_desc-] [SKIPPED] >> test.py::test[schema-select_all_inferschema-] >> test.py::test[insert-override-proto] [SKIPPED] >> test.py::test[insert_monotonic-not_all_fail-] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[aggregate-dedup_state_keys-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> test.py::test[blocks-coalesce_ints-] [GOOD] >> test.py::test[bigdate-table_arithmetic-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail-] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-] >> test.py::test[aggregate-GroupByTwoFields-] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt] >> test.py::test[blocks-combine_all_min_filter_opt-] >> test.py::test[hor_join-out_max_outtables-default.txt] [GOOD] >> test.py::test[insert-append_sorted-to_sorted] >> test.py::test[bigdate-table_common_type-default.txt-Results] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt] [SKIPPED] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> test.py::test[insert-append_sorted-to_sorted] [SKIPPED] |98.7%| [TM] {RESULT} 
ydb/tests/functional/query_cache/py3test >> test.py::test[insert-append_sorted-to_sorted_calc] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt] >> test.py::test[order_by-order_by_expr_mul_cols-] [GOOD] >> test.py::test[order_by-sort-] >> test.py::test[insert-append_sorted-to_sorted_calc] [SKIPPED] >> test.py::test[join-simple_columns_partial-] [GOOD] >> test.py::test[join-three_equalities_paren-off] [SKIPPED] >> test.py::test[key_filter-dict_contains-default.txt] >> test.py::test[insert-append_with_read_udf_fail-] [SKIPPED] >> test.py::test[insert-multiappend_sorted-default.txt] [SKIPPED] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] Test command err: 2025-05-05T10:02:39.182763Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T10:02:39.184074Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T10:02:39.184119Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-05-05T10:02:39.184168Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-05-05T10:02:39.184176Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:02:39.187917Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 05 May 2025 10:02:39 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-05-05T10:02:39.187977Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:02:49.292317Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T10:02:49.298493Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T10:02:49.302221Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-05-05T10:02:49.305664Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:02:49.310412Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:02:49.313143Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:02:49.316955Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:02:49.320364Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:02:49.323223Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:02:49.325017Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-05-05T10:02:49.325189Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-05-05T10:02:49.325304Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:02:49.325428Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:02:49.325431Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T10:02:49.403063Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.403241Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:02:49.403248Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T10:02:49.431348Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.431435Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:02:49.431445Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T10:02:49.455193Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.455308Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:02:49.455312Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T10:02:49.522323Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.522449Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:02:49.522455Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T10:02:49.551913Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.552017Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-05-05T10:02:49.552021Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-05-05T10:02:49.576027Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.576061Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:02:49.593245Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-05-05T10:02:49.593282Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:02:49.653241Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:02:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:02:49.653288Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:02:59.852220Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T10:02:59.852531Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T10:02:59.852592Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-05-05T10:02:59.852630Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-05-05T10:02:59.852640Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:02:59.857406Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Mon, 05 May 2025 10:02:59 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-05-05T10:02:59.857444Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:03:09.987996Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T10:03:09.990831Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T10:03:09.994138Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-05-05T10:03:09.997431Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-05-05T10:03:09.998497Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-05-05T10:03:09.999434Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-05-05T10:03:09.999630Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-05-05T10:03:09.999746Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-05-05T10:03:09.999756Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-05-05T10:03:10.027995Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Mon, 05 May 2025 10:03:10 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-05-05T10:03:10.028037Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-05-05T10:03:10.100364Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:03:10 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:03:10.100416Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-05-05T10:03:10.144811Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 05 May 2025 10:03:10 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-05-05T10:03:10.144874Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:03:10.436734Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-05-05T10:03:10.436806Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T10:03:10.436843Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-05-05T10:03:10.436902Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-05-05T10:03:10.436911Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-05-05T10:03:10.472405Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 05 May 2025 10:03:10 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-05-05T10:03:10.472461Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:03:10.472518Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-05-05T10:03:10.472557Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-05-05T10:03:10.472584Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-05-05T10:03:10.472587Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-05-05T10:03:10.489175Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 05 May 2025 10:03:10 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-05-05T10:03:10.489223Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer >> test.py::test[insert-override-] [SKIPPED] >> test.py::test[blocks-finalize_hashed_keys-] [GOOD] >> test.py::test[insert-yql-13083-] [SKIPPED] >> test.py::test[join-alias_where_group-off] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> test.py::test[aggregate-group_by_ru_join_star-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_star-] >> test.py::test[blocks-group_by_complex_key-] >> test.py::test[window-win_func_rank_by_part--Results] [GOOD] >> test.py::test[window-yql-14738-default.txt-ForceBlocks] >> test.py::test[join-alias_where_group-off] [SKIPPED] >> test.py::test[join-equi_join_three_simple-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> test.py::test[join-bush_in_in-] [GOOD] >> test.py::test[join-equi_join_three_asterisk_eval-] |98.7%| [TS] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> test.py::test[blocks-pg_to_numbers-] [GOOD] >> test.py::test[blocks-top_sort_one_asc-] >> test.py::test[schema-user_schema_mix2-] [GOOD] >> test.py::test[select-calculated_values-default.txt] >> test.py::test[order_by-limit-] [GOOD] >> test.py::test[order_by-literal_complex-] [SKIPPED] >> test.py::test[order_by-singular-default.txt] [SKIPPED] >> test.py::test[pg-nulls-default.txt] >> test.py::test[column_order-winfunc-default.txt] [GOOD] >> test.py::test[count-count_by_nulls-] >> test.py::test[schema-select_all_inferschema-] [GOOD] >> test.py::test[schema-select_field-read_schema] >> test.py::test[pg-nulls-default.txt] [SKIPPED] >> test.py::test[join-from_in_front_join-] [GOOD] >> test.py::test[join-full_trivial-off] >> test.py::test[aggregate-dedup_state_keys-] [GOOD] >> test.py::test[aggregate-disable_blocks_with_spilling-] [SKIPPED] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt] >> test.py::test[pg-pg_types_orderby-] [SKIPPED] >> test.py::test[schema-select_fields_inferschema-] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt] [SKIPPED] >> test.py::test[schema-user_schema_override-] [SKIPPED] >> test.py::test[pg-tpcds-q24-default.txt] >> test.py::test[join-pullup_exclusion-] [GOOD] >> test.py::test[join-pullup_renaming-off] [SKIPPED] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test.py::test[join-simple_columns_partial-off] [SKIPPED] >> test.py::test[json-jsondocument/select-] >> test.py::test[window-generic/aggregations_after_current--Results] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] >> test.py::test[join-anyjoin_merge_nodup-] [GOOD] >> test.py::test[join-bush_dis_in_in-] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test.py::test[pg-tpcds-q24-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q49-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q51-default.txt] >> test.py::test[join-full_trivial-off] [SKIPPED] >> test.py::test[join-inner_with_order-off] [SKIPPED] >> test.py::test[join-join_and_distinct_key-] >> test.py::test[blocks-combine_all_min_filter_opt-] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping_and_expr-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] [GOOD] |98.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> 
test.py::test[blocks-date_less_or_equal-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> test.py::test[solomon-LabelColumns-default.txt] >> test.py::test[blocks-add_uint64_opt-] [GOOD] >> test.py::test[blocks-combine_all_sum_filter-] >> test.py::test[aggregate-group_by_session_extended_subset-] >> test.py::test[pg-tpcds-q51-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q67-default.txt] >> test.py::test[order_by-sort-] [GOOD] >> test.py::test[pg-select_columnref1-default.txt] [SKIPPED] >> test.py::test[pg-select_table2-default.txt] >> test.py::test[aggregate-group_by_session_star-] [GOOD] >> test.py::test[pg-select_table2-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q22-default.txt] >> test.py::test[pg-tpcds-q67-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q70-default.txt] >> test.py::test[key_filter-dict_contains-default.txt] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt] >> test.py::test[aggregate-group_compact_sorted_distinct-] >> test.py::test[aggregate-group_by_gs_simp--Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q47-default.txt] >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> test.py::test[pg-tpcds-q70-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q78-default.txt] >> test.py::test[blocks-group_by_complex_key-] [GOOD] >> test.py::test[blocks-string_len_and_cmp-] |98.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> test.py::test[pg-tpcds-q78-default.txt] [SKIPPED] >> test.py::test[join-equi_join_three_simple-] [GOOD] >> test.py::test[pg-tpcds-q47-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q83-default.txt] >> test.py::test[join-inner_all_right-] >> test.py::test[pg-tpcds-q83-default.txt] [SKIPPED] >> test.py::test[pragma-file-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q50-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> test.py::test[blocks-top_sort_one_asc-] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q76-default.txt] >> test.py::test[produce-process_with_assume-] >> test.py::test[select-calculated_values-default.txt] [GOOD] >> test.py::test[select-discard-default.txt] >> test.py::test[produce-process_with_assume-] [SKIPPED] >> test.py::test[produce-process_with_python_stream-empty] >> test.py::test[column_group-hint_append_fail-diff_grp] [SKIPPED] >> test.py::test[pg-tpcds-q76-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q79-default.txt] >> test.py::test[produce-process_with_python_stream-empty] [SKIPPED] >> test.py::test[produce-reduce_multi_in-sorted] [SKIPPED] >> test.py::test[select-discard-default.txt] [SKIPPED] >> test.py::test[select-table_content_from_double_opt-default.txt] [SKIPPED] >> test.py::test[column_order-select_win_func-default.txt] >> test.py::test[produce-reduce_multi_in_sampling-] [SKIPPED] >> test.py::test[ql_filter-integer_escaping-] >> test.py::test[join-equi_join_three_asterisk_eval-] [GOOD] >> test.py::test[join-full_join-off] 
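Aside on the KQP_COMPUTE Solomon sink traces above: the sink accumulates rows in a buffer, flushes them to the Solomon HTTP API in batches of at most 1000 metrics, skips a flush when the buffer is empty or the in-flight request limit is reached (the "MaxRequestsInflight" skip reason), and lets a checkpoint complete only once all in-flight requests are acknowledged with a {"writtenMetricsCount": N} response. Below is a minimal Python sketch of that buffer/batch/in-flight pattern, not the actual YDB implementation; the class name, the in-flight limit of 3, and the send_fn callback are assumptions for illustration.

    # Sketch of the buffer -> batch -> limited-in-flight flow seen in the traces above.
    from collections import deque

    MAX_METRICS_PER_REQUEST = 1000   # matches the 1000-metric batches in the log
    MAX_REQUESTS_INFLIGHT = 3        # assumed limit behind the "MaxRequestsInflight" skip

    class SolomonSinkSketch:
        def __init__(self, send_fn):
            self.send_fn = send_fn            # callable performing one HTTP write (assumed)
            self.buffer = deque()             # metrics waiting to be batched
            self.inflight = 0                 # requests sent, responses not yet seen
            self.checkpoint_in_progress = False

        def push(self, metrics):
            self.buffer.extend(metrics)       # "Push N bytes of data to buffer"
            self.flush()

        def flush(self):
            if self.checkpoint_in_progress:
                return "CheckpointInProgress"  # skip reasons as printed in the log
            if not self.buffer:
                return "Empty buffer"
            while self.buffer and self.inflight < MAX_REQUESTS_INFLIGHT:
                batch = [self.buffer.popleft()
                         for _ in range(min(MAX_METRICS_PER_REQUEST, len(self.buffer)))]
                self.send_fn(batch)            # "Sent N metrics with size of M bytes to solomon"
                self.inflight += 1
            return "MaxRequestsInflight" if self.buffer else None

        def start_checkpoint(self):
            # "Process checkpoint. Inflight before checkpoint: N"
            self.checkpoint_in_progress = self.inflight > 0

        def on_response(self, written_metrics_count):
            # one HTTP 200 with {"writtenMetricsCount": N} per completed request
            self.inflight -= 1
            if self.checkpoint_in_progress and self.inflight == 0:
                self.checkpoint_in_progress = False
            self.flush()

In this sketch the node-4 trace sequence above corresponds to push(2400 items), three send_fn calls (1000 + 1000 + 400 metrics), a "MaxRequestsInflight" skip, and a checkpoint that waits for the two remaining responses.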
>> test.py::test[schema-select_field-read_schema] [GOOD] >> test.py::test[select-unlabeled_1000-] >> test.py::test[pg-tpcds-q79-default.txt] [SKIPPED] >> test.py::test[ql_filter-integer_escaping-] [SKIPPED] >> test.py::test[schema-append_to_desc_with_remap-] [SKIPPED] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt] [SKIPPED] >> test.py::test[select-append_to_value_1000-] >> test.py::test[join-full_join-off] [SKIPPED] >> test.py::test[join-grace_join1-] >> test.py::test[pg-tpch-q02-default.txt] >> test.py::test[count-count_by_nulls-] [GOOD] >> test.py::test[select-refselect-1000] [SKIPPED] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] |98.7%| [TM] {RESULT} ydb/tests/functional/encryption/py3test >> test.py::test[join-grace_join1-] [SKIPPED] >> test.py::test[count-count_const_no_grouping-default.txt] >> test.py::test[pg-tpch-q02-default.txt] [SKIPPED] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> test.py::test[join-inner_all-] >> test.py::test[select-result_size_limit-] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt] [SKIPPED] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] >> test.py::test[join-bush_dis_in_in-] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off] |98.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[pg-tpch-q15-default.txt] >> test.py::test[epochs-read_modified--Results] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt] [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> test.py::test[json-jsondocument/select-] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt] >> test.py::test[key_filter-is_null_with_condition-] >> test.py::test[pg-tpch-q15-default.txt] [SKIPPED] >> test.py::test[blocks-combine_all_sum_filter-] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off] [SKIPPED] >> test.py::test[join-count_bans-] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt] [GOOD] >> test.py::test[pg_catalog-lambda-] [SKIPPED] >> test.py::test[lambda-lambda_with_tie-default.txt] >> test.py::test[produce-process_multi_out-] [SKIPPED] >> test.py::test[produce-process_pure_with_sort-default.txt] [SKIPPED] >> test.py::test[produce-process_streaming_count-default.txt] >> test.py::test[blocks-complex_scalars-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test.py::test[select-match_clause-] >> test.py::test[aggregate-group_compact_sorted_distinct-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset-] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> test.py::test[aggregate-group_compact_sorted-] >> test.py::test[join-join_and_distinct_key-] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> 
test.py::test[aggregate-library_error_in_aggregation_fail-] [SKIPPED] >> test.py::test[bigdate-table_explicit_cast-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> test.py::test[join-left_join_right_pushdown_no_opt-] >> KqpFederatedQuery::ExecuteScriptWithLargeStrings [GOOD] >> KqpFederatedQuery::ExecuteScriptWithLargeFile >> test.py::test[blocks-date_less_or_equal-] [GOOD] >> test.py::test[blocks-date_not_equals_scalar-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test.py::test[blocks-string_len_and_cmp-] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> test.py::test[count-count_nullable_sub-default.txt] |98.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.8%| [TS] {RESULT} ydb/tests/library/ut/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test.py::test[select-unlabeled_1000-] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] |98.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[aggregate-group_by_gs_simp--Results] [GOOD] >> test.py::test[join-inner_all_right-] [GOOD] >> test.py::test[join-inner_all-] [GOOD] >> test.py::test[join-left_all-] >> test.py::test[join-inner_on_key_only-off] >> test.py::test[select-where_in-default.txt] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> test.py::test[join-inner_on_key_only-off] [SKIPPED] >> test.py::test[join-inner_trivial-] >> test.py::test[count-count_const_no_grouping-default.txt] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc-] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs-] >> test.py::test[select-append_to_value_1000-] [GOOD] >> test.py::test[select-boolean_where-] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> test.py::test[datetime-date_tz_table_sort_desc-] [SKIPPED] >> test.py::test[dq-blacklisted_pragmas-] [SKIPPED] >> test.py::test[dq-mem_limit-] [SKIPPED] >> test.py::test[expr-empty_iterator-] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs-] [SKIPPED] >> test.py::test[select-match_clause-] [GOOD] >> test.py::test[column_order-select_win_func-default.txt] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt] >> test.py::test[select-refselect-] >> test.py::test[solomon-UnknownSetting-] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup-] >> test.py::test[select-result_size_limit-] [GOOD] >> test.py::test[select-result_size_limit_with_fill-] >> test.py::test[key_filter-is_null_with_condition-] [GOOD] >> test.py::test[select-refselect-] [SKIPPED] >> test.py::test[select-result_rows_limit-] >> test.py::test[column_order-union_all_positional_unordered_fail-] [SKIPPED] >> test.py::test[count-boolean_count-] >> test.py::test[blocks-complex_scalars-] [GOOD] >> test.py::test[blocks-date_group_by-] [SKIPPED] >> test.py::test[blocks-distinct_opt_state_keys-] >> 
test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber |98.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[aggregate-percentiles_containers--Results] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt] [GOOD] >> test.py::test[lineage-select_group_by_key-default.txt] [SKIPPED] >> test.py::test[multicluster-extend-default.txt] [SKIPPED] >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> test.py::test[produce-process_streaming_count-default.txt] [GOOD] >> test.py::test[produce-reduce_with_python_input_stream-] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content-] [SKIPPED] >> test.py::test[sampling-subquery_filter-default.txt] [SKIPPED] >> test.py::test[count-count_nullable_sub-default.txt] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt] >> test.py::test[join-left_join_right_pushdown_no_opt-] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt] >> test.py::test[schema-copy-other] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted-] [GOOD] >> test.py::test[aggregate-percentiles_grouped-] >> test.py::test[bigdate-table_explicit_cast-default.txt] [GOOD] >> test.py::test[bigdate-table_yt_native-default] >> test.py::test[bigdate-table_yt_native-default] [SKIPPED] >> test.py::test[join-left_null_literal-off] >> test.py::test[schema-copy-schema] [SKIPPED] >> test_tpch.py::TestTpchS1::test_tpch[1] >> test.py::test[join-left_null_literal-off] [SKIPPED] >> test.py::test[join-left_only_with_other-off] [SKIPPED] >> test.py::test[schema-insert-schema] [SKIPPED] >> test.py::test[bigdate-tz_table_rw-] >> test.py::test[join-lookupjoin_inner_empty_subq-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-] >> test.py::test[schema-select_all-row_spec_part] >> test.py::test[bigdate-tz_table_rw-] [SKIPPED] >> test.py::test[binding-bind_select-default.txt] >> test.py::test[join-left_all-] [GOOD] >> test.py::test[join-left_join_right_pushdown_null-] >> test.py::test[blocks-date_not_equals_scalar-] [GOOD] >> test.py::test[blocks-date_sub_scalar-] >> test.py::test[join-inner_trivial-] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-] >> test.py::test[join-count_bans-] [GOOD] >> test.py::test[join-equi_join_two_mult_keys-] >> test.py::test[select-where_in-default.txt] [GOOD] |98.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] >> test.py::test[select-where_with_lambda-] >> test.py::test[select-result_rows_limit-] [GOOD] >> test.py::test[select-reuse_named_node-default.txt] |98.8%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt] [GOOD] >> test.py::test[key_filter-split_input_with_key_filter2-] [SKIPPED] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] >> test.py::test[expr-empty_iterator-] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan-] >> test.py::test[limit-limit_over_sort_desc_in_subquery-] >> test.py::test[select-result_size_limit_with_fill-] [GOOD] >> test.py::test[select-select_all_ordered-default.txt] >> test.py::test[aggregate-group_by_gs_with_rollup-] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt] >> test.py::test[aggregate-aggregation_and_order-default.txt] [GOOD] 
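The test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[...] cases interleaved above exercise each integer column type at its extreme values (Int32 at ±2^31, UInt32 at 0 and 2^32-1, Int64 and Uint64 likewise), each in two variants. A hedged sketch of how such a boundary-value check can be written against the YDB Python SDK follows; the pool fixture, table names, and exact parametrization are assumptions for illustration, not the real test code.

    # Boundary-value sketch in the spirit of test_minimal_maximal_values.
    # Assumes the test harness provides a ydb.SessionPool fixture named `pool`.
    import pytest
    import ydb

    BOUNDS = [
        ("Int32", -2**31), ("Int32", 2**31 - 1),
        ("Uint32", 0), ("Uint32", 2**32 - 1),
        ("Int64", -2**63), ("Int64", 2**63 - 1),
        ("Uint64", 0), ("Uint64", 2**64 - 1),
    ]

    @pytest.mark.parametrize("ydb_type,value", BOUNDS)
    def test_minimal_maximal_values(pool, ydb_type, value):
        table = f"kv_bounds_{ydb_type}_{abs(value)}"

        def callee(session):
            session.execute_scheme(
                f"CREATE TABLE `{table}` (key Uint64, value {ydb_type}, PRIMARY KEY (key));")
            # pass the extreme value as a typed parameter to avoid literal-overflow issues
            prepared = session.prepare(
                f"DECLARE $value AS {ydb_type}; "
                f"UPSERT INTO `{table}` (key, value) VALUES (1u, $value);")
            session.transaction(ydb.SerializableReadWrite()).execute(
                prepared, {"$value": value}, commit_tx=True)
            result = session.transaction(ydb.SerializableReadWrite()).execute(
                f"SELECT value FROM `{table}` WHERE key = 1u;", commit_tx=True)
            return result[0].rows[0].value

        # the extreme value must round-trip through the table unchanged
        assert pool.retry_operation_sync(callee) == value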
>> test.py::test[blocks-distinct_opt_state_keys-] [GOOD] >> test.py::test[select-boolean_where-] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt] >> test.py::test[aggregate-group_by_expr_dict-] >> test.py::test[count-boolean_count-] [GOOD] >> test.py::test[blocks-interval_sub_interval-] >> test.py::test[distinct-distinct_count_and_avg-default.txt] >> test.py::test[schema-select_all-row_spec_part] [GOOD] >> test.py::test[schema-select_simple-default.txt] >> test.py::test[join-left_join_right_pushdown_null-] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-] >> test.py::test[binding-bind_select-default.txt] [GOOD] >> test.py::test[binding-insert_binding-] [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt] [GOOD] >> test.py::test[optimizers-yql-5978_fill_multi_usage-] [SKIPPED] >> test.py::test[optimizers-yt_shuffle_by_keys-] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-] [SKIPPED] >> test.py::test[aggregate-percentiles_grouped-] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt] >> test.py::test[order_by-order_by_dot_column-default.txt] >> test.py::test[blocks-date_less_scalar-] >> test.py::test[join-mergejoin_big_primary_unique-] >> test.py::test[join-equi_join_two_mult_keys-] [GOOD] >> test.py::test[join-inner_grouped-] >> test.py::test[distinct-distinct_columns-default.txt] [GOOD] >> test.py::test[dq-read_cost-default.txt] [SKIPPED] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_NONE-client0] >> test.py::test[expr-langver-] [SKIPPED] >> test.py::test[expr-len-] >> test.py::test[join-join_semi_correlation_in_order_by-] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off] [SKIPPED] >> test.py::test[join-join_without_correlation_and_struct_access-off] >> KqpQuerySession::NoLocalAttach [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off] [SKIPPED] >> test.py::test[join-left_all-off] [SKIPPED] >> test.py::test[blocks-date_sub_scalar-] [GOOD] >> test.py::test[blocks-div_uint64_opt2-] >> test.py::test[join-left_join_right_pushdown_simple-] >> test.py::test[select-where_with_lambda-] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt] [SKIPPED] >> test.py::test[type_v3-float-] [SKIPPED] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native-] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] >> test.py::test[aggregate-group_by_expr_dict-] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native-] [SKIPPED] >> test.py::test[udf-udaf_distinct-] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt] >> test.py::test[select-select_all_ordered-default.txt] [GOOD] >> test.py::test[select-to_dict-default.txt] >> test.py::test[blocks-interval_sub_interval-] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic-] >> test.py::test[udf-udaf_distinct-] [SKIPPED] >> test.py::test[view-secure_eval_dyn-] >> test.py::test[view-secure_eval_dyn-] [SKIPPED] >> test.py::test[weak_field-weak_field_join_where-] >> test.py::test[schema-select_simple-default.txt] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc] >> test.py::test[distinct-distinct_count_and_avg-default.txt] [GOOD] >> test.py::test[limit-limit_over_sort_desc_in_subquery-] [GOOD] >> test.py::test[lineage-reduce_all_row-default.txt] [SKIPPED] >> 
test.py::test[join-lookupjoin_inner_1o2o-] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off] >> test.py::test[join-mergejoin_big_primary_unique-] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner-] >> test.py::test[distinct-distinct_star-default.txt] >> test.py::test[select-reuse_named_node-default.txt] [GOOD] >> test.py::test[select-sampleselect-1000] >> test.py::test[lineage-select_mix_fields-default.txt] >> test.py::test[lineage-select_mix_fields-default.txt] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_star-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_star1-] >> test.py::test[bigdate-implicit_cast_callable-default.txt] [GOOD] >> test.py::test[lineage-window_many-default.txt] [SKIPPED] >> test.py::test[multicluster-map_force-] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt] >> test.py::test[multicluster-map_force-] [SKIPPED] >> test.py::test[multicluster-sort_force-] [SKIPPED] >> test.py::test[join-inner_grouped-] [GOOD] >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage-] >> test.py::test[join-join_comp_map_table-] >> test.py::test[expr-len-] [GOOD] >> test.py::test[flatten_by-flatten_few_fields-] >> test.py::test[blocks-div_uint64_opt2-] [GOOD] >> test.py::test[blocks-pg_tofrom-] >> test.py::test[expr-evaluate_parse_inf_nan-] [GOOD] >> test.py::test[flatten_by-flatten_with_resource-] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_all-] [SKIPPED] >> test.py::test[in-in_noansi_join-] >> test.py::test[blocks-date_less_scalar-] [GOOD] >> test.py::test[blocks-date_sub-] >> Transfer::CreateTransfer_EnterpiseVersion >> test.py::test[select-corr_name_in_select-default.txt] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple-] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt] >> test.py::test[join-left_only_semi_and_other-] >> test.py::test[aggregate-group_by_gs_flatten_expr-default.txt] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref-] >> test.py::test[order_by-order_by_dot_column-default.txt] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |98.8%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> test.py::test[select-to_dict-default.txt] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt] >> test.py::test[weak_field-weak_field_join_where-] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt] >> test.py::test[blocks-lazy_nonstrict_basic-] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-off] [GOOD] >> test.py::test[join-mergejoin_force_per_link-] >> test.py::test[select-sampleselect-1000] [GOOD] >> test.py::test[select-select_all_from_concat-default.txt] [SKIPPED] >> test.py::test[blocks-mod_uint64-] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt] >> test.py::test[join-mergejoin_semi_composite_to_inner-] [GOOD] >> test.py::test[distinct-distinct_star1-] [GOOD] >> test.py::test[schema-select_with_map-sorted_desc] [GOOD] >> test.py::test[select-dict_lookup-default.txt] >> 
test.py::test[join-mergejoin_semi_to_inner-] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> test.py::test[dq-read_cost_native-default.txt] [SKIPPED] >> test.py::test[epochs-use_and_drop_anonymous-] [SKIPPED] >> test.py::test[expr-double_join_with_list_from_range-] >> test.py::test[expr-double_join_with_list_from_range-] [SKIPPED] >> test.py::test[file-file_constness-] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_take-default.txt] [GOOD] >> test.py::test[order_by-literal_single_item_sort-] >> test.py::test[file-parse_file_in_select_as_str-] >> test.py::test[file-parse_file_in_select_as_str-] [SKIPPED] >> test.py::test[hor_join-out_mem_limit-default.txt] >> test.py::test[order_by-literal_single_item_sort-] [SKIPPED] >> test.py::test[pg-join_using_multiple2-] [SKIPPED] >> Transfer::CreateTransfer_EnterpiseVersion [GOOD] >> Transfer::CreateTransfer_TargetNotFound >> test.py::test[pg-select_yql_type-] [SKIPPED] >> test.py::test[pg-tpcds-q56-default.txt] >> test.py::test[join-join_comp_map_table-] [GOOD] >> test.py::test[join-lookupjoin_inner-off] >> test.py::test[aggregate-group_by_ru_join_simple_fs_multiusage-] [GOOD] >> test.py::test[join-lookupjoin_inner-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o-off] >> test.py::test[aggregate-percentiles_ungrouped-] >> test.py::test[join-lookupjoin_inner_1o-off] [SKIPPED] >> test.py::test_local [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off] >> test.py::test[pg-tpcds-q56-default.txt] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off] [SKIPPED] >> Transfer::CreateTransfer_TargetNotFound [GOOD] >> Replication::Types >> test.py::test[pg-tpcds-q68-default.txt] >> test.py::test[join-left_only_semi_and_other-] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-] >> test.py::test[select-dict_lookup_by_key-default.txt] [GOOD] >> test.py::test[join-mapjoin_dup_key-] >> test.py::test[blocks-date_sub-] [GOOD] >> test.py::test[blocks-div_uint64-] >> test.py::test[select-from_in_front-default.txt] >> test.py::test[blocks-pg_tofrom-] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt] [GOOD] >> test.py::test[tpch-q21-default.txt] >> test.py::test[window-yql-14738-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] >> test.py::test[blocks-sort_one_desc-] >> test.py::test[aggregate-group_by_rollup_column_ref-] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func-] >> test.py::test[pg-tpcds-q68-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q77-default.txt] >> test.py::test[in-in_noansi_join-] [GOOD] >> test.py::test[tpch-q21-default.txt] [SKIPPED] >> test.py::test[order_by-order_by_dynum_desc-default.txt] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt] >> test.py::test[join-mergejoin_force_per_link-] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q84-default.txt] >> test.py::test[weak_field-weak_field_type-default.txt] [GOOD] >> test.py::test[join-mergejoin_small_primary-] >> test.py::test[insert-append-with_view] [SKIPPED] >> test.py::test[type_v3-append_diff_layout1-] [SKIPPED] >> test.py::test[type_v3-append_struct-default.txt] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt] >> test.py::test[blocks-mod_uint64-] [GOOD] >> test.py::test[window-rank/opt-] >> test.py::test[insert-append_after_replace-default.txt] [SKIPPED] >> 
test.py::test[type_v3-append_struct-default.txt] [SKIPPED] >> test.py::test[type_v3-replace_diff_layout-] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt] [GOOD] >> test.py::test[blocks-pg-] >> test.py::test[select-dict_lookup-default.txt] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt] >> test.py::test[flatten_by-flatten_few_fields-] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by-] >> test.py::test[view-file_eval-] [SKIPPED] >> test.py::test[view-file_inner_library-] >> test.py::test[type_v3-replace_diff_layout-] [SKIPPED] >> test.py::test[view-file_inner_library-] [SKIPPED] >> test.py::test[union_all-union_all_multiple-default.txt] >> test.py::test[insert-append_sorted-to_sorted_desc] [SKIPPED] >> test.py::test[insert-insert_relabeled-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q84-default.txt] [SKIPPED] >> test.py::TestSqsSplitMergeFifoTables::test_fifo_merge_split [GOOD] |98.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt] [SKIPPED] >> test.py::test[insert-replace_ordered_by_key_desc-default.txt] [SKIPPED] >> test.py::test[insert-use_anon_table_without_fill_fail-] >> test.py::test[pg-tpch-q04-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail-] >> test.py::test[bigdate-table_common_type-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_semi_to_inner-] [GOOD] >> test.py::test[join-mergejoin_small_primary-off] >> test.py::test[hor_join-out_mem_limit-default.txt] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread_fail-] [SKIPPED] >> test.py::test[produce-process_with_python_as_struct-default.txt] >> test.py::test[view-secure_eval-] >> test.py::test[insert-use_anon_table_without_fill_fail-] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in-] >> test.py::test[join-mergejoin_small_primary-off] [SKIPPED] >> test.py::test[join-premap_map_inner-] >> test.py::test[join-premap_map_inner-] [SKIPPED] >> test.py::test[view-secure_eval-] [SKIPPED] >> test.py::test[produce-process_with_python_as_struct-default.txt] [SKIPPED] >> test.py::test[window-full/leadlag-] >> test.py::test[produce-reduce_lambda_presort_twin_list-] >> test.py::test[join-pullup_context_dep-] >> test.py::test[aggregate-percentiles_ungrouped-] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt] >> test.py::test[blocks-div_uint64-] [GOOD] >> test.py::test[blocks-interval_div_scalar-] >> test.py::test[aggregate-group_by_ru_with_window_func-] [GOOD] >> test.py::test[aggregate-group_by_session_nopush-] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord-] [GOOD] >> test.py::test[join-mapjoin_on_very_complex_type-] >> test.py::test[join-mergejoin_small_primary-] [GOOD] >> test.py::test[join-premap_common_left_cross-off] >> test.py::test[aggregate-table_funcs_group_by-default.txt] >> test.py::test[blocks-pg-] [GOOD] >> test.py::test[blocks-pg_from_dates-] >> test.py::test[join-premap_common_left_cross-off] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-] [SKIPPED] >> test.py::test[join-split_to_list_as_key-off] >> test.py::test[window-rank/opt-] [GOOD] >> test.py::test[join-split_to_list_as_key-off] [SKIPPED] >> test.py::test[join-yql-14829_leftonly-] >> test.py::test[select-select_concrete_detailed_columns-default.txt] [GOOD] >> 
test.py::test[select-table_content_from_sort_desc-default.txt] >> test.py::test[window-rank/unordered-] >> test.py::test[join-yql-14829_leftonly-] [SKIPPED] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt] >> test.py::test[select-table_content_from_sort_desc-default.txt] [SKIPPED] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_fail-] [SKIPPED] >> Replication::Types [GOOD] >> Replication::PauseAndResumeReplication >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt] [SKIPPED] >> test.py::test[order_by-order_by_value_desc-default.txt] [GOOD] |98.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff-] >> test.py::test[hor_join-out_table_record-default.txt] [GOOD] >> test.py::test[join-bush_dis_in_in_in-] [GOOD] >> test.py::test[pg-aggregate_combine_all-] [SKIPPED] >> test.py::test[produce-reduce_lambda_presort_twin_list-] [GOOD] >> test.py::test[like-like_clause-default.txt] >> test.py::test[join-mapjoin_dup_key-] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff-] [SKIPPED] >> test.py::test[union_all-union_all_multiple-default.txt] [GOOD] >> test.py::test[view-file_inner_udf-] >> test.py::test[select-from_in_front-default.txt] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt] >> test.py::test[join-convert_key-off] [SKIPPED] >> test.py::test[join-flatten_columns1-off] >> test.py::test[join-premap_common_cross-] [SKIPPED] >> test.py::test[join-premap_common_semi-off] >> test.py::test[table_range-concat_sorted_max_tables-] [SKIPPED] >> test.py::test[type_v3-bare_yson-] >> test.py::test[produce-reduce_multi_in_sampling-sorted] >> test.py::test[hor_join-sorted_out_mix-] >> test.py::test[pg-select_qstarref2-default.txt] [SKIPPED] >> test.py::test[join-flatten_columns1-off] [SKIPPED] >> test.py::test[type_v3-bare_yson-] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix-] [SKIPPED] >> test.py::test[select-scalar_subquery-default.txt] >> test.py::test[weak_field-weak_field_in_group_by-] >> test.py::test[insert-append_missing_null-default.txt] >> test.py::test[view-file_inner_udf-] [SKIPPED] >> test.py::test[produce-reduce_multi_in_sampling-sorted] [SKIPPED] >> test.py::test[join-left_cast_to_string-] >> test.py::test[join-premap_common_semi-off] [SKIPPED] >> test.py::test[flatten_by-flatten_with_group_by-] [GOOD] >> test.py::test[ansi_idents-escaped_udf_name-default.txt] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt] >> test.py::test[insert-append_missing_null-default.txt] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns-] >> test.py::test[hor_join-table_record-] >> test.py::test[sampling-direct_read-] [SKIPPED] >> test.py::test[sampling-mapjoin_right_sample-default.txt] >> test.py::test[join-pullup_exclusion-off] >> test.py::test[binding-table_filter_binding-default.txt] [SKIPPED] >> test.py::test[insert-keepmeta-with_view] [SKIPPED] >> test.py::test[insert_monotonic-several2-default.txt] >> test.py::test[weak_field-few_source_different_columns-] [SKIPPED] >> test.py::test[weak_field-weak_field-] >> test.py::test[sampling-mapjoin_right_sample-default.txt] [SKIPPED] >> test.py::test[schema-concat-] [SKIPPED] >> test.py::test[schema-diffrerent_schemas-] >> test.py::test[window-full/leadlag-] [GOOD] >> test.py::test[window-full/session_aliases-] >> test.py::test[insert_monotonic-several2-default.txt] [SKIPPED] >> 
test.py::test[join-bush_in-off] >> test.py::test[binding-table_from_binding_inferscheme-default.txt] >> test.py::test[pg-tpcds-q04-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q35-default.txt] |98.8%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> test.py::test[join-bush_in-off] [SKIPPED] >> test.py::test[join-pullup_exclusion-off] [SKIPPED] >> test.py::test[join-force_merge_join-default.txt] >> test.py::test[join-pullup_context_dep-] [GOOD] >> test.py::test[join-star_join_inners-] >> test.py::test[join-pullup_left-] >> test.py::test[pg-tpcds-q35-default.txt] [SKIPPED] >> test.py::test[blocks-sort_one_desc-] [GOOD] >> test.py::test[case-case_val_then_else-default.txt] >> test.py::test[pg-tpcds-q40-default.txt] >> test.py::test[join-mapjoin_on_very_complex_type-] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off] >> test.py::test[join-mapjoin_partial_uniq_keys-off] [SKIPPED] >> test.py::test[join-mergejoin_force_align2-] >> test.py::test[bigdate-table_int_cast-default.txt] [GOOD] >> test.py::test[blocks-boolean_ops-] |98.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[bigdate-table_common_type-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_force_align2-] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-] >> test.py::test[pg-tpcds-q40-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q57-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q58-default.txt] >> test.py::test[aggregate-table_funcs_group_by-default.txt] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt] >> test.py::test[blocks-pg_from_dates-] [GOOD] >> test.py::test[blocks-pg_to_dates-] >> test.py::test[pg-tpcds-q58-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q66-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q82-default.txt] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_NONE-client0] [GOOD] >> test.py::test[pg-tpcds-q82-default.txt] [SKIPPED] >> test.py::test[produce-fuse_reduces_with_presort-] [SKIPPED] >> test.py::test[produce-process_multi_in-] [SKIPPED] >> test.py::test[ql_filter-integer_single-] [SKIPPED] >> test.py::test[result_types-containers-default.txt] [SKIPPED] >> test.py::test[sampling-bind_join_right-default.txt] [SKIPPED] >> test.py::test[sampling-map-keyfilter] >> test_clickbench.py::TestClickbench::test_clickbench[0] >> test.py::test[sampling-map-keyfilter] [SKIPPED] >> test.py::test[schema-fake_column-default.txt] [SKIPPED] >> test.py::test[select-braces-default.txt] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] >> test.py::test[weak_field-weak_field_in_group_by-] [GOOD] >> test.py::test[weak_field-weak_field_num_access-] >> test.py::test[like-like_clause-default.txt] [GOOD] >> test.py::test[lineage-list_literal3-default.txt] [SKIPPED] >> test.py::test[lineage-window_session-default.txt] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input-] [SKIPPED] >> test.py::test[optimizers-multi_to_empty_constraint-] |98.9%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part3/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[select-scalar_subquery-default.txt] [GOOD] >> test.py::test[window-rank/unordered-] [GOOD] >> test.py::test[window-win_expr_bounds-] >> test.py::test[select-select_all-default.txt] >> test.py::test[window-win_expr_bounds-] [SKIPPED] >> test.py::test[window-win_func_over_group_by-] >> test.py::test[join-left_cast_to_string-] [GOOD] >> test.py::test[join-left_only_semi_and_other-off] [SKIPPED] >> test.py::test[schema-diffrerent_schemas-] [GOOD] >> test.py::test[schema-insert-read_schema] [SKIPPED] >> test.py::test[schema-read_schema_other-] >> test.py::test[weak_field-weak_field-] [GOOD] >> test.py::test[hor_join-table_record-] [GOOD] >> test.py::test[in-in_with_table_of_tuples-default.txt] [SKIPPED] >> test.py::test[in-yql-10038-default.txt] [SKIPPED] >> test.py::test[in-yql-14677-default.txt] >> test.py::test[join-pullup_left-] [GOOD] >> test.py::test[join-pullup_rownumber-off] [SKIPPED] >> test.py::test[join-yql-12022-off] [SKIPPED] >> test.py::test[join-yql-14829_leftonly-off] >> test.py::test[join-lookupjoin_semi_1o-] >> test.py::test[weak_field-weak_field_strict-] >> test.py::test[window-full/session_aliases-] [GOOD] >> test.py::test[window-generic/session_aliases-] >> test.py::test[case-case_val_then_else-default.txt] [GOOD] >> test.py::test[case-case_val_when_then-default.txt] >> test.py::test[join-yql-14829_leftonly-off] [SKIPPED] >> test.py::test[key_filter-string_with_legacy-] >> test.py::test[blocks-interval_div_scalar-] [GOOD] >> test.py::test[blocks-mul_uint64_opt2-] >> test.py::test[join-mergejoin_narrows_output_sort-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-off] [SKIPPED] >> test.py::test[join-nested_semi_join-off] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-] >> test.py::test[ansi_idents-basic_columns-default.txt] [GOOD] >> test.py::test[bigdate-table_io-default.txt] >> test.py::test[bigdate-table_io-default.txt] [SKIPPED] >> test.py::test[binding-drop_binding-] [SKIPPED] >> test.py::test[join-star_join_inners-] [GOOD] >> test.py::test[blocks-pg_to_dates-] [GOOD] >> test.py::test[blocks-add_uint64-] >> test.py::test[blocks-pg_top_sort-] >> test.py::test[key_filter-decimal-] [SKIPPED] >> test.py::test[key_filter-is_null-] >> test.py::test[join-force_merge_join-default.txt] [GOOD] >> test.py::test[join-left_cast_to_string-off] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_right-] >> test.py::test[binding-table_from_binding_inferscheme-default.txt] [GOOD] >> test.py::test[binding-table_regexp_binding-] >> test.py::test[binding-table_regexp_binding-] [SKIPPED] >> test.py::test[blocks-add_uint16-] >> test.py::test[select-braces-default.txt] [GOOD] >> test.py::test[select-exists_with_table-default.txt] >> test.py::test[weak_field-weak_field_strict-] [GOOD] >> test.py::test[select-exists_with_table-default.txt] [SKIPPED] >> test.py::test[select-substring_v1-default.txt] >> test.py::test[window-win_func_over_group_by-] [GOOD] >> test.py::test[ypath-multi_key-default.txt] >> test.py::test[weak_field-weak_field_num_access-] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt] >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact-] >> test.py::test[blocks-boolean_ops-] [GOOD] >> test.py::test[select-select_all-default.txt] [GOOD] >> test.py::test[select-trivial_group_by-default.txt] >> test.py::test[blocks-combine_all_min-] >> test.py::test[optimizers-multi_to_empty_constraint-] [GOOD] >> 
test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-] [GOOD] >> test.py::test[join-premap_nonseq_flatmap-off] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input-] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test.py::test[join-pullup_null_column-off] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_rename-off] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_BASIC-client0] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> test.py::test[case-case_val_when_then-default.txt] [GOOD] >> test.py::test[window-generic/session_aliases-] [GOOD] >> test.py::test[window-lagging/aggregations-] >> test.py::test[case-case_when_then-default.txt] >> test.py::test[blocks-mul_uint64_opt2-] [GOOD] >> test.py::test[column_group-groups-perusage] >> test.py::test[blocks-add_uint64-] [GOOD] >> test.py::test[blocks-combine_all_count_filter-] >> test.py::test[in-yql-14677-default.txt] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] >> test.py::test[insert-drop_sortness-] [SKIPPED] >> test.py::test[insert-override-with_read_udf] [SKIPPED] >> test.py::test[insert-unique_distinct_hints-] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> test.py::test[insert-unique_distinct_hints-] [SKIPPED] >> test.py::test[join-compact_join-] [SKIPPED] >> test.py::test[join-equi_join_by_expr-] >> test.py::test[key_filter-string_with_legacy-] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt] >> test.py::test[window-yql-14738-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint16-] [GOOD] >> test.py::test[schema-read_schema_other-] [GOOD] >> test.py::test[schema-select_all-row_spec_hide_sort] >> test.py::test[ypath-limit_with_key-default.txt-ForceBlocks] >> test.py::test[blocks-coalesce_complex-default.txt] [SKIPPED] >> test.py::test[blocks-combine_all_some-] >> test.py::test[join-lookupjoin_semi_1o-] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-] >> test.py::test[select-substring_v1-default.txt] [GOOD] >> test.py::test[select-trivial_order_by-default.txt] >> test.py::test[weak_field-weak_field_real_col-default.txt] [GOOD] >> test.py::test[window-current/session_extended-] >> test.py::test[select-trivial_group_by-default.txt] [GOOD] >> test.py::test[select-where_not_null-] >> test.py::test[ypath-multi_key-default.txt] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right-] [GOOD] >> test.py::test[blocks-pg_top_sort-] [GOOD] >> test.py::test[blocks-top_sort_one_desc-] >> test.py::test[join-lookupjoin_bug7646_subst-] >> test.py::test[window-full/aggregations_leadlag_compact-] [GOOD] >> test.py::test[window-full/session_aliases_compact-] >> test.py::test[key_filter-is_null-] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic] [SKIPPED] >> test.py::test[blocks-combine_all_min-] [GOOD] >> test.py::test[blocks-combine_all_minmax_nested-] >> test.py::test[join-lookupjoin_bug7646_subst-] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533-] >> test.py::test[limit-limit-dynamic] [SKIPPED] >> test.py::test[limit-limit_offset-default.txt] >> test.py::test[column_group-groups-perusage] [GOOD] >> test.py::test[case-case_when_then-default.txt] [GOOD] >> test.py::test[column_group-hint_anon_groups-perusage] >> 
test.py::test[column_group-hint_anon_groups-perusage] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail-] >> test.py::test[join-yql-10654_pullup_with_sys_columns-] [GOOD] >> test.py::test[key_filter-contains_optional-] >> test.py::test[join-equi_join_by_expr-] [GOOD] >> test.py::test[join-full_equal_null-] >> test.py::test[column_group-insert_diff_groups2_fail-] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail-] [SKIPPED] >> test.py::test[column_order-join-] >> test.py::test[column_group-length-perusage] >> test.py::test[blocks-combine_all_count_filter-] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested-] >> test.py::test[optimizers-yql-2582_limit_for_join_input-] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt] >> test.py::test[lambda-lambda_simple-default.txt] [GOOD] >> test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt] [SKIPPED] >> test.py::test[order_by-SortByTwoFields-] >> test.py::test[select-trivial_order_by-default.txt] [GOOD] >> test.py::test[join-mapjoin_with_empty_read-] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt] >> test.py::test[join-mapjoin_with_empty_struct-] >> test.py::test[window-lagging/aggregations-] [GOOD] >> test.py::test[select-where_not_null-] [GOOD] >> test.py::test[blocks-top_sort_one_desc-] [GOOD] >> test.py::test[window-row_number_to_map-default.txt] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt] >> test.py::test[blocks-top_sort_two_mix-] >> test.py::test[schema-select_all-row_spec_hide_sort] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field] >> test.py::test[join-lookupjoin_bug8533-] [GOOD] >> test.py::test[blocks-combine_all_some-] [GOOD] >> test.py::test[blocks-combine_hashed_max-] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> test.py::test[join-lookupjoin_not_selected-] >> test.py::test[window-current/session_extended-] [GOOD] >> test.py::test[limit-limit_offset-default.txt] [GOOD] >> test.py::test[window-full/noncompact_with_nulls_tuple_key-] >> Replication::PauseAndResumeReplication [GOOD] >> test.py::test[limit-yql-8611_calc_peephole-] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt] [SKIPPED] >> test.py::test[multicluster-pull-default.txt] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out-] [SKIPPED] >> test.py::test[optimizers-group_visit_lambdas-] [SKIPPED] >> test.py::test[key_filter-contains_optional-] [GOOD] >> test.py::test[window-full/session_aliases_compact-] [GOOD] >> test.py::test[optimizers-sort_by_nonstrict_const-] [SKIPPED] >> test.py::test[blocks-combine_hashed_minmax_nested-] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt] >> test.py::test[key_filter-mixed_sort-] [SKIPPED] >> test.py::test[blocks-date_equals_scalar-] >> test.py::test[blocks-combine_all_minmax_nested-] [GOOD] >> test.py::test[blocks-compare_dates_floats_bools-] [SKIPPED] >> test.py::test[window-row_number_no_part_multi_input-default.txt] [SKIPPED] >> test.py::test[key_filter-string_with-default.txt] >> test.py::test[blocks-date_less_or_equal_scalar-] >> test.py::test[window-udaf_window-] [SKIPPED] >> 
test.py::test[limit-yql-7900_empty_sorted_without_keys-default.txt] [GOOD] >> test.py::test[lineage-select_field-default.txt] [SKIPPED] >> test.py::test[lineage-window_one-default.txt] >> test.py::test[column_group-length-perusage] [GOOD] >> test.py::test[column_order-align_publish-] [SKIPPED] >> test.py::test[column_order-select_sample-default.txt] >> test.py::test[window-win_by_all_percentile_interval-default.txt] >> test.py::test[column_order-join-] [GOOD] >> test.py::test[join-full_equal_null-] [GOOD] >> test.py::test[lineage-window_one-default.txt] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset-] >> test.py::test[column_order-select_distinct_star-default.txt] >> test.py::test[join-full_trivial-] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> test.py::test[window-row_number_to_map-default.txt] [GOOD] >> test.py::test[order_by-SortByTwoFields-] [GOOD] >> test.py::test[order_by-assume_over_input_desc-] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted-] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt] >> test.py::test[order_by-native_desc_assume_with_transform-] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-] [GOOD] >> test.py::test[blocks-top_sort_two_mix-] [GOOD] >> test.py::test[column_order-insert-] |98.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/py3test >> test.py::test[ypath-multi_key-default.txt] [GOOD] >> test.py::test[column_order-insert-] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry-off] [SKIPPED] >> test.py::test[simple_columns-no_simple_columns_tablerow-default.txt] [GOOD] >> test.py::test[column_order-insert_with_reorder_cols-] [SKIPPED] >> test.py::test[params-complex_yson-] [SKIPPED] >> test.py::test[simple_columns-simple_columns_subreq-default.txt] >> test.py::test[pg-select_columnref2-default.txt] [SKIPPED] >> test.py::test[pg-select_qstarref1-default.txt] >> test.py::test[join-mergejoin_force_per_link-off] >> test.py::test[window-full/noncompact_with_nulls_tuple_key-] [GOOD] >> test.py::test[window-full/session-] >> test.py::test[join-lookupjoin_not_selected-] [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> test.py::test[schema-select_all_inferschema-extra_field] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema] >> test.py::test[count-count_no_grouping-default.txt] >> test.py::test[pg-select_qstarref1-default.txt] [SKIPPED] >> test.py::test[pg-select_table1-default.txt] >> test.py::test[blocks-combine_hashed_max-] [GOOD] >> test.py::test[blocks-combine_hashed_pg-] >> test.py::test[join-mergejoin_force_per_link-off] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-] >> test.py::test[join-lookupjoin_semi_empty-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-off] [SKIPPED] >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] >> test.py::test[pg-select_table1-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q17-default.txt] >> 
test.py::test[pg-tpcds-q17-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q43-default.txt] >> test.py::test[column_order-select_distinct_star-default.txt] [GOOD] >> test.py::test[column_order-select_limit_offset-default.txt] >> test.py::test[pg-tpcds-q43-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q86-default.txt] >> test.py::test[ypath-limit_with_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with-default.txt] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test.py::test[lineage-select_table_row-default.txt] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge-] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset-] [GOOD] >> test.py::test[optimizers-field_subset_for_multiusage-] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt] [GOOD] >> test.py::test[ypath-limit_with_key-default.txt-Results] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test.py::test[pg-tpcds-q86-default.txt] [SKIPPED] >> test.py::test[window-win_by_all_percentile_interval-default.txt] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map-] >> test.py::test[optimizers-test_no_aggregate_split-] >> test.py::test[window-win_func_aggr_4func_sort-] >> test.py::test[optimizers-keepworld_emptyflatmap-] [SKIPPED] >> test.py::test[optimizers-yql-6008_limit_after_map-] [SKIPPED] >> test.py::test[join-full_trivial-] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt] >> test.py::test[optimizers-yql-9297_publish_ytcopy-] [SKIPPED] >> test.py::test[join-inner_grouped-off] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate-] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads-] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_BASIC-client0] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt] [SKIPPED] >> test.py::test[order_by-native_desc_sort-] [SKIPPED] >> test.py::test[blocks-date_equals_scalar-] [GOOD] >> test.py::test[join-inner_grouped-off] [SKIPPED] >> test.py::test[blocks-date_less_or_equal_scalar-] [GOOD] >> test.py::test[blocks-distinct_mixed_all-] >> test.py::test[pg-tpch-q06-default.txt] >> test.py::test[order_by-order_by_expr_simple-] >> test.py::test[blocks-date_sub_interval-] >> test.py::test[join-join_no_correlation_in_order_by-off] [SKIPPED] >> test.py::test[join-join_table_conflict_fail-] >> test.py::test[simple_columns-simple_columns_subreq-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted-] [GOOD] >> test.py::test[window-win_func_aggr_stat-] >> test.py::test[pg-tpch-q06-default.txt] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/replication/unittest >> Replication::PauseAndResumeReplication [GOOD] Test command err: DDL: CREATE TABLE `Table_2568450878862886506` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_2568450878862886506` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_2568450878862886506` FROM `Topic_2568450878862886506` TO `Table_2568450878862886506` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4277/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Scheme operation failed, status: ExecError, reason: The transfer is only available in the Enterprise version }
>>>>> EXPECTED: The transfer is only available in the Enterprise version
DDL: DROP TABLE `Table_2568450878862886506`
DDL: DROP TOPIC `Topic_2568450878862886506`
DDL: CREATE TOPIC `Topic_477247201723781013` WITH ( min_active_partitions = 10 );
DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; };
;
CREATE TRANSFER `Transfer_477247201723781013` FROM `Topic_477247201723781013` TO `Table_477247201723781013` USING $l
WITH (
    CONNECTION_STRING = 'grpc://localhost:4277/?database=local'
    , FLUSH_INTERVAL = Interval('PT1S')
    , BATCH_SIZE_BYTES = 8388608
);
>>>>> ACTUAL: {
: Error: Scheme operation failed, status: ExecError, reason: The transfer destination path '/local/Table_477247201723781013' not found }
>>>>> EXPECTED: The transfer destination path '/local/Table_477247201723781013' not found
DDL: DROP TOPIC `Topic_477247201723781013`
DDL: CREATE TABLE `SourceTable_4328077576632913388` (
    Key Uint32,
    Key2 Uuid,
    v01 Uuid,
    v02 Uuid NOT NULL,
    v03 Double,
    PRIMARY KEY (Key, Key2)
);
>>>>> Query: UPSERT INTO `SourceTable_4328077576632913388` (Key,Key2,v01,v02,v03) VALUES (
    1,
    CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid),
    CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid),
    UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)),
    CAST("311111111113.222222223" as Double)
);
DDL: CREATE ASYNC REPLICATION `Replication_4328077576632913388` FOR `SourceTable_4328077576632913388` AS `Table_4328077576632913388`
WITH (
    CONNECTION_STRING = 'grpc://localhost:4277/?database=local'
);
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_4328077576632913388` ORDER BY `Key2`, `v01`, `v02`, `v03`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_4328077576632913388]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
Attempt=19 count=-1
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_4328077576632913388` ORDER BY `Key2`, `v01`, `v02`, `v03`
Attempt=18 count=0
>>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_4328077576632913388` ORDER BY `Key2`, `v01`, `v02`, `v03`
Attempt=17 count=1
DDL: DROP ASYNC REPLICATION `Replication_4328077576632913388`;
DDL: DROP TABLE `SourceTable_4328077576632913388`
DDL: CREATE TABLE `SourceTable_18204482521383868999` (
    Key Uint64 NOT NULL,
    Message Utf8,
    PRIMARY KEY (Key)
);
DDL: CREATE ASYNC REPLICATION `Replication_18204482521383868999` FOR `SourceTable_18204482521383868999` AS `Table_18204482521383868999`
WITH (
    CONNECTION_STRING = 'grpc://localhost:4277/?database=local'
);
>>>>> Query: INSERT INTO `SourceTable_18204482521383868999` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: SELECT `Message` FROM `Table_18204482521383868999` ORDER BY `Message`
>>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_18204482521383868999]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Message` FROM `Table_18204482521383868999` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_18204482521383868999` ORDER BY `Message` Attempt=17 count=1 State: Paused DDL: ALTER ASYNC REPLICATION `Replication_18204482521383868999` SET ( STATE = "Paused" ); >>>>> Query: INSERT INTO `SourceTable_18204482521383868999` (`Key`, `Message`) VALUES (2, 'Message-2'); >>>>> Query: SELECT `Message` FROM `Table_18204482521383868999` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER ASYNC REPLICATION `Replication_18204482521383868999` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_18204482521383868999` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_18204482521383868999` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER ASYNC REPLICATION `Replication_18204482521383868999` SET ( STATE = "Paused" ); DDL: ALTER ASYNC REPLICATION `Replication_18204482521383868999` SET ( STATE = "StandBy" ); DDL: DROP ASYNC REPLICATION `Replication_18204482521383868999`; DDL: DROP TABLE `SourceTable_18204482521383868999` >> test.py::test[pg-tpch-q20-default.txt] >> test.py::test[tpch-q11-default.txt] >> test.py::test[join-join_table_conflict_fail-] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt] [GOOD] >> test.py::test[table_range-limit_with_table_path_over_sorted_range-] >> test.py::test[join-join_without_correlation_names-] >> test.py::test[pg-tpch-q20-default.txt] [SKIPPED] >> test.py::test[table_range-limit_with_table_path_over_sorted_range-] [SKIPPED] >> test.py::test[table_range-table_funcs_expr-] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_into_udf--Results] >> test.py::test[produce-process_streaming-default.txt] >> test.py::test[table_range-table_funcs_expr-] [SKIPPED] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> test.py::test[tpch-q20-default.txt] >> test.py::test[schema-select_with_map-partial_read_schema] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-] |98.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> test.py::test[join-mapjoin_early_rewrite_star-] [GOOD] >> test.py::test[schema-user_schema_append-] [SKIPPED] >> test.py::test[count-count_no_grouping-default.txt] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-] >> test.py::test[csee-yql-7237-] [SKIPPED] >> test.py::test[select-backtick_with_escapes-default.txt] >> test.py::test[distinct-distinct_list_after_group-default.txt] >> test.py::test[column_order-select_sample-default.txt] [GOOD] >> test.py::test[dq-precompute_asyncfile-] [SKIPPED] >> test.py::test[window-full/session-] [GOOD] >> test.py::test[expr-inline_call-] >> test.py::test[window-lagging/aggregations_leadlag-] >> test.py::test[blocks-combine_hashed_pg-] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] >> test.py::test[blocks-interval_div-] >> 
test.py::test[column_order-select_limit_offset-default.txt] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> test.py::test[optimizers-unused_columns_window_no_payloads-] [GOOD] >> test.py::test[optimizers-yql-5833-table_content-] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test.py::test[optimizers-test_no_aggregate_split-] [GOOD] >> test.py::test[order_by-order_by_expr_simple-] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> test.py::test[ypath-limit_with_key-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-ForceBlocks] >> test.py::test[order_by-yql-19598-] [SKIPPED] >> test.py::test[pg-drop_table-] [SKIPPED] >> test.py::test[pg-join_using_tables1-default.txt] [SKIPPED] >> test.py::test[pg-point-default.txt] [SKIPPED] >> test.py::test[optimizers-unused_columns_group-] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt] [SKIPPED] >> test.py::test[pg-select_limit-default.txt] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> test.py::test[pg-select_limit-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q02-default.txt] >> test.py::test[blocks-date_sub_interval-] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort-] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested-] >> test.py::test[window-win_func_aggr_hist-] >> test.py::test[pg-tpcds-q02-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q46-default.txt] >> test.py::test[blocks-distinct_mixed_all-] [GOOD] >> test.py::test[blocks-minmax_tuple-] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] >> test.py::test[produce-process_streaming-default.txt] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q03-default.txt] >> test.py::test[join-join_without_correlation_names-] [GOOD] >> test.py::test[join-left_semi_with_other-] >> test.py::test[ql_filter-integer_members_eval-] [SKIPPED] >> test.py::test[sampling-direct_read-dynamic] [SKIPPED] >> test.py::test[pg-tpch-q03-default.txt] [SKIPPED] >> test.py::test[sampling-join_left_sample-default.txt] [SKIPPED] >> test.py::test[sampling-table_content-] >> test.py::test[expr-inline_call-] [GOOD] >> test.py::test[file-parse_file_in_select_as_int-] [SKIPPED] >> test.py::test[sampling-table_content-] [SKIPPED] >> test.py::test[sampling-take_with_sampling-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q13-default.txt] [SKIPPED] >> test.py::test[flatten_by-flatten_columns-default.txt] |98.9%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/py3test >> test.py::test[tpch-q11-default.txt] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt] [GOOD] >> test.py::test[select-cast_double_to_uint32-default.txt] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt] [GOOD] >> test.py::test[dq-wrong_script_segf-] >> test.py::test[schema-user_schema_existing_column-] >> test.py::test[produce-process_rows_and_filter-] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt] >> 
test.py::test[join-mergejoin_with_different_key_names_norename-] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm-] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off] [SKIPPED] >> test.py::test[dq-wrong_script_segf-] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt] [SKIPPED] >> test.py::test[produce-reduce_subfields-] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream-] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_FULL-client0] >> test.py::test[join-order_of_qualified-off] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap-] >> test.py::test[union_all-path_and_record-default.txt] >> test.py::test[flatten_by-flatten_mode-default.txt] >> test.py::test[blocks-interval_div-] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-off] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted-off] [SKIPPED] >> test.py::test[blocks-top_sort_two_desc-] >> test.py::test[join-order_of_qualified-] >> test.py::test[produce-reduce_with_python_few_keys_stream-] [SKIPPED] >> test.py::test[produce-reduce_with_python_presort_stream-] [SKIPPED] >> test.py::test[flatten_by-flatten_mode-default.txt] [SKIPPED] >> test.py::test[hor_join-group_ranges-] >> test.py::test[join-premap_common_multiparents_no_premap-] [SKIPPED] >> test.py::test[join-pullup_left_semi-off] >> test.py::test[sampling-bind_expr_subquery-default.txt] >> test.py::test[hor_join-group_ranges-] [SKIPPED] >> test.py::test[in-in_types_cast-default.txt] >> test.py::test[join-pullup_left_semi-off] [SKIPPED] >> test.py::test[join-pullup_null_column-] >> test.py::test[distinct-distinct_having_no_agg-default.txt] [GOOD] >> test.py::test[dq-precompute_result-default.txt] >> test.py::test[dq-precompute_result-default.txt] [SKIPPED] >> test.py::test[expr-non_persistable_insert_into_fail-] [SKIPPED] >> test.py::test[window-win_func_aggr_stat-] [GOOD] >> test.py::test[hor_join-group_sampling-] >> test_diff_processing.py::TestTpchDiffProcessing::test_tpch[CheckCanonicalPolicy.ERROR] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] >> test.py::test[optimizers-yql-5833-table_content-] [GOOD] >> test.py::test[order_by-literal_desc-] [SKIPPED] >> test.py::test[window-win_func_first_last-] >> test.py::test[hor_join-group_sampling-] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props-] >> test.py::test[order_by-union_all-] [SKIPPED] >> test.py::test[pg-join_using_tables4-default.txt] [SKIPPED] >> test.py::test[pg-pg_column_case-] [SKIPPED] >> test.py::test[pg-tpcds-q10-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q29-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q39-default.txt] [SKIPPED] |98.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/py3test >> test.py::TestSqsSplitMergeFifoTables::test_fifo_merge_split [GOOD] >> test.py::test[pg-tpcds-q44-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q72-default.txt] >> test.py::test[pg-tpcds-q72-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q75-default.txt] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_fail-] [SKIPPED] >> test.py::test[optimizers-unused_columns_group-] [GOOD] >> test.py::test[order_by-changed_sort_with_limit-] >> test.py::test[blocks-lazy_nonstrict_nested-] [GOOD] >> test.py::test[blocks-minmax_strings_filter-] >> 
test.py::test[window-win_func_aggr_hist-] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part-] >> test.py::test[order_by-changed_sort_with_limit-] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream-] [SKIPPED] >> test.py::test[join-left_semi_with_other-] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o-] >> test.py::test[produce-reduce_lambda_list_table-] >> test.py::test[order_by-order_with_null-default.txt] >> test.py::test[tpch-q20-default.txt] [GOOD] >> test.py::test[type_v3-append_diff_layout2-] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint-opt] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=4171515) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |98.9%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/py3test >> test.py::test[sampling-bind_expr_subquery-default.txt] [GOOD] |98.9%| [TM] {RESULT} ydb/tests/fq/common/py3test >> test.py::test[sampling-subquery_limit-default.txt] [SKIPPED] >> test.py::test[sampling-zero_percentage-] [SKIPPED] >> test.py::test[schema-user_schema_mix3-] >> test.py::test[window-lagging/aggregations_leadlag-] [GOOD] >> test.py::test[window-leading/aggregations_leadlag-] >> test.py::test[join-pullup_null_column-] [GOOD] >> test.py::test[join-pullup_rownumber-] >> test.py::test[select-cast_double_to_uint32-default.txt] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt] >> test.py::test[blocks-minmax_tuple-] [GOOD] >> test.py::test[blocks-sort_two_mix-] >> test.py::test[union_all-path_and_record-default.txt] [GOOD] >> test.py::test[union_all-union_all_multiin-] >> test.py::test[in-in_types_cast-default.txt] [GOOD] >> test.py::test[in-in_with_tuple-default.txt] |99.0%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part3/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[schema-user_schema_existing_column-] [GOOD] >> test.py::test[select-swap_columns-default.txt] >> test.py::test[blocks-top_sort_two_desc-] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt] >> test.py::test[hor_join-yql-12610_old_table_props-] [GOOD] >> test.py::test[hor_join-yql19332_aux_cols-] [SKIPPED] >> test.py::test[insert-merge_publish-] [SKIPPED] >> test.py::test[insert-select_after_replace-default.txt] [SKIPPED] >> test.py::test[insert-values_subquery-] |99.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part4/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[flatten_by-flatten_columns-default.txt] [GOOD] >> test.py::test[in-in_enum_single1-default.txt] >> test.py::test[insert-values_subquery-] [SKIPPED] >> test.py::test[insert_monotonic-break_sort_fail-] >> test.py::test[insert_monotonic-break_sort_fail-] [SKIPPED] >> test.py::test[join-bush_in-] >> test.py::test[blocks-minmax_strings_filter-] [GOOD] >> test.py::test[window-win_func_first_last-] [GOOD] >> test.py::test[window-win_func_lead_lag_worm-] >> test.py::test[join-lookupjoin_inner_1o-] [GOOD] >> test.py::test[join-mapjoin_left_null_column-] >> test.py::test[blocks-nested_optionals-] >> test.py::test[window-win_func_lead_lag_worm_with_part-] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt] >> test.py::test[order_by-order_with_null-default.txt] [GOOD] >> test.py::test[pg-name-] [SKIPPED] >> test.py::test[pg-select_common_type_unionall-] [SKIPPED] >> test.py::test[pg-tpcds-q01-default.txt] >> test.py::test[produce-reduce_lambda_list_table-] [GOOD] >> test.py::test[produce-reduce_lambda_presort_twin-] >> test.py::test[join-pullup_rownumber-] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off] [SKIPPED] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf-] >> test.py::test[type_v3-ignore_v3_hint-opt] [GOOD] >> test.py::test[type_v3-uuid-] [SKIPPED] >> test.py::test[pg-tpcds-q01-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q27-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q30-default.txt] >> test.py::test[join-order_of_qualified-] [GOOD] >> test.py::test[join-premap_common_inner-off] [SKIPPED] >> test.py::test[union_all-inner_union_all_with_limits-default.txt] [SKIPPED] >> test.py::test[union_all-mix_map_and_project-] >> test.py::test[pg-tpcds-q30-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q45-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q55-default.txt] >> test.py::test[blocks-sort_two_mix-] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off] >> test.py::test[union_all-union_all_multiin-] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-ForceBlocks] [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromCommandLine [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt] >> ParseOptionsTest::NoDiscoveryCommandLine >> test.py::test[pg-tpcds-q55-default.txt] [SKIPPED] >> test.py::test[blocks-string_filter-] >> test.py::test[ypath-limit_with_range-default.txt-Results] >> ParseOptionsTest::NoDiscoveryCommandLine [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromActiveProfile >> test.py::test[join-premap_common_inner_both_sides-off] [SKIPPED] >> test.py::test[join-premap_common_multiparents-off] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap-off] [SKIPPED] >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] >> test.py::test[pg-tpcds-q64-default.txt] >> test.py::test[select-swap_columns-default.txt] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt] [SKIPPED] >> test.py::test[select-tablepathprefix-default.txt] >> ParseOptionsTest::EndpointAndDatabaseFromActiveProfile [GOOD] >> test.py::test[schema-user_schema_mix3-] [GOOD] >> ParseOptionsTest::EndpointAndDatabaseFromExplicitProfile >> test.py::test[join-premap_common_semi-] [SKIPPED] >> test.py::test[join-pullup_renaming-] >> test.py::test[coalesce-coalesce_few_real-default.txt] [GOOD] >> test.py::test[column_group-hint_anon-disable] >> test.py::test[pg-tpcds-q64-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q98-default.txt] [SKIPPED] >> 
test.py::test[produce-process_with_udf_rows-default.txt] [SKIPPED] >> test.py::test[window-leading/aggregations_leadlag-] [GOOD] >> test.py::test[window-win_extract_members-default.txt] >> ParseOptionsTest::EndpointAndDatabaseFromExplicitProfile [GOOD] >> ParseOptionsTest::IamToken >> test.py::test[select-append_to_value-] >> test.py::test[select-complex_filter_with_order-default.txt] [GOOD] >> test.py::test[select-hits_count-] >> test.py::test[column_group-hint_anon-disable] [SKIPPED] >> test.py::test[column_group-hint_anon-single] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail4-] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail-] [SKIPPED] >> test.py::test[produce-process_with_udf_validate-default.txt] [SKIPPED] >> test.py::test[produce-reduce_lambda-] >> test.py::test[select-hits_count-] [SKIPPED] >> test.py::test[select-one_labeled_column-default.txt] >> test.py::test[column_group-publish-single] [SKIPPED] >> test.py::test[column_order-select_orderby-default.txt] >> ParseOptionsTest::IamToken [GOOD] >> ParseOptionsTest::YdbToken >> test.py::test[window-win_func_lead_lag_worm-] [GOOD] >> test.py::test[window-win_func_over_group_by_compl-] >> test.py::test[blocks-nested_optionals-] [GOOD] >> test.py::test[coalesce-coalesce-] >> test.py::test[in-in_with_tuple-default.txt] [GOOD] >> test.py::test[insert-append_sorted-] [SKIPPED] >> test.py::test[insert-keepmeta-] [SKIPPED] >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Results] >> test.py::test[insert-udf_empty-] [SKIPPED] >> test.py::test[join-bush_dis_in-off] [SKIPPED] >> test.py::test[join-cbo_7tables-] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk-] >> test.py::test[join-bush_in-] [GOOD] >> test.py::test[join-cbo_7tables_only_common_join-] [SKIPPED] >> ParseOptionsTest::YdbToken [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf-] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-] >> ParseOptionsTest::StaticCredentials >> test.py::test[produce-reduce_lambda_presort_twin-] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple_difftype-] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk-off] [SKIPPED] >> test.py::test[join-equi_join_three_asterisk_eval-off] [SKIPPED] >> test.py::test[join-flatten_columns2-] >> test.py::test[produce-reduce_with_python_presort-] [SKIPPED] >> test.py::test[produce-reduce_with_python_row-] >> test.py::test[in-in_enum_single1-default.txt] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt] [SKIPPED] >> test.py::test[insert-part_sortness-] [SKIPPED] >> test.py::test[insert-part_sortness-desc] [SKIPPED] >> test.py::test[insert-replace_inferred-] [SKIPPED] >> test.py::test[produce-reduce_with_python_row-] [SKIPPED] >> test.py::test[sampling-bind_topsort-default.txt] [SKIPPED] >> test.py::test[schema-insert_sorted-row_spec] >> test_workload.py::TestYdbWorkload::test >> test.py::test[insert-yql-14538-] [SKIPPED] >> test.py::test[insert_monotonic-truncate_and_append-default.txt] [SKIPPED] >> test.py::test[join-bush_dis_in-] >> test.py::test[schema-insert_sorted-row_spec] [SKIPPED] >> test.py::test[schema-select_all-row_spec] >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[join-mapjoin_left_null_column-] [GOOD] >> test.py::test[join-mapjoin_left_null_column-off] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-] >> test.py::test[select-append_to_value-] [GOOD] >> 
test.py::test[select-const_subrequest_and_select_by_all-default.txt] >> test.py::test[union_all-union_all_subexpr-default.txt] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt] >> test.py::test[select-tablepathprefix-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt] >> test.py::test[join-pullup_renaming-] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-] >> test.py::test[select-one_labeled_column-default.txt] [GOOD] >> test.py::test[select-optional_in_job-] >> test.py::test[produce-reduce_lambda-] [GOOD] >> test.py::test[produce-reduce_multi_out-] [SKIPPED] >> test.py::test[sampling-map-dynamic] >> test.py::test[column_order-select_orderby-default.txt] [GOOD] >> test.py::test[column_order-select_plain-default.txt] >> test.py::test[window-win_multiaggr_tuple-default.txt] [GOOD] >> test.py::test[sampling-map-dynamic] [SKIPPED] >> test.py::test[sampling-subquery_expr-default.txt] >> test.py::test[select-optional_in_job-] [SKIPPED] >> test.py::test[select-trivial_where-many] >> ParseOptionsTest::StaticCredentials [GOOD] >> ParseOptionsTest::AnonymousCredentials >> test.py::test[blocks-string_filter-] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt] [SKIPPED] >> test.py::test[schema-row_spec_with_default_values-] >> test.py::test[column_group-groups-single] >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] >> ParseOptionsTest::AnonymousCredentials [GOOD] >> ParseOptionsTest::EnvPriority >> test.py::test[union_all-mix_map_and_project-] [GOOD] >> test.py::test[weak_field-weak_field_long_name-] >> ParseOptionsTest::EnvPriority [GOOD] >> YdbDump::NotNullTypeDump >> test.py::test[coalesce-coalesce-] [GOOD] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_FULL-client0] [GOOD] >> test.py::test[coalesce-coalesce_sugar-default.txt] >> YdbDump::NotNullTypeDump [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_DefaultValue >> test.py::test[window-win_extract_members-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt] [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_DefaultValue [GOOD] >> YdbTopic::SupportedCodecs_TopicCreate_UserValue >> test.py::test[window-win_func_lead_lag_opt-] >> test.py::test[table_range-tablepath_with_non_existing-] [SKIPPED] >> YdbTopic::SupportedCodecs_TopicCreate_UserValue [GOOD] >> YdbTopic::SupportedCodecs_TopicAlter >> test.py::test[schema-select_all-row_spec] [GOOD] >> test.py::test[schema-select_all_forceinferschema-] >> test.py::test[tpch-q15-default.txt] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-] [GOOD] >> test.py::test[join-premap_common_left_cross-] >> test.py::test[select-create_tuples-default.txt] >> test.py::test[join-selfjoin_on_sorted_with_filter-] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine-] >> test.py::test[join-star_join_mirror-off] [SKIPPED] >> test.py::test[join-star_join_semionly-off] [SKIPPED] >> test.py::test[join-two_aggrs-default.txt] >> test.py::test[join-equi_join_three_asterisk-] [GOOD] >> YdbTopic::SupportedCodecs_TopicAlter [GOOD] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_DefaultValue >> test.py::test[column_group-groups-single] [GOOD] >> test.py::test[select-trivial_where-many] [GOOD] >> 
test.py::test[join-premap_common_left_cross-] [SKIPPED] >> test.py::test[join-premap_map_semi-] >> test.py::test[join-flatten_columns1-] >> test.py::test[schema-row_spec_with_default_values-] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-] [GOOD] >> test.py::test[join-bush_dis_in-] [GOOD] >> test.py::test[join-convert_key-] >> test.py::test[schema-select_all-yamred_dsv] >> test.py::test[simple_columns-simple_columns_base_fail-] [SKIPPED] >> test.py::test[column_order-join_nosimple-] >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] >> test.py::test[join-right_trivial-off] [SKIPPED] >> test.py::test[join-flatten_columns2-] [GOOD] >> test.py::test[join-premap_map_semi-] [SKIPPED] >> test.py::test[join-star_join-off] >> test.py::test[join-grace_join2-] [SKIPPED] >> test.py::test[join-premap_no_premap-off] |99.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt] [SKIPPED] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_DefaultValue [GOOD] >> test.py::test[column_order-select_plain-default.txt] [GOOD] >> test.py::test[join-star_join-off] [SKIPPED] >> test.py::test[join-premap_no_premap-off] [SKIPPED] >> test.py::test[join-inner_with_select-] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_UserValue >> test.py::test[table_range-concat_sorted_with_key_diff-] [SKIPPED] >> test.py::test[type_v3-type_subset-] [SKIPPED] >> test.py::test[distinct-distinct_one_count-default.txt] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt] [SKIPPED] >> test.py::test[join-star_join_multi-] >> test.py::test[join-prune_keys-] [SKIPPED] >> test.py::test[join-pullup_context_dep-off] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-off] [SKIPPED] >> test.py::test[join-yql-8131-off] >> test.py::test[schema-select_all-yamred_dsv] [SKIPPED] >> test.py::test[select-autoextract_source_value-default.txt] >> test.py::test[view-trivial_view_concat-] >> test.py::test[window-win_func_over_group_by_compl-] [GOOD] >> test.py::test[view-trivial_view_concat-] [SKIPPED] >> test.py::test[weak_field-weak_field_opt-] >> YdbTopic::SupportedCodecs_TopicConsumerAdd_UserValue [GOOD] >> YdbWorkloadTopic::Default_RunFull >> test.py::test[window-win_func_rank_by_opt_part-] >> test.py::test[weak_field-weak_field_long_name-] [GOOD] >> test.py::test[window-generic/aggregations_before_current-] >> test.py::test[coalesce-coalesce_sugar-default.txt] [GOOD] >> test.py::test[column_group-hint_anon-perusage] >> test.py::test[column_group-hint_anon-perusage] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail-] [SKIPPED] >> test.py::test[column_order-ordered_plus_native-] [SKIPPED] >> test.py::test[distinct-distinct_and_join-] >> test.py::test[schema-select_all_forceinferschema-] [GOOD] >> test.py::test[schema-select_all_inferschema_op-] >> test.py::test[window-win_func_lead_lag_opt-] [GOOD] >> test.py::test[ypath-empty_range-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_map_combine-] [GOOD] >> test.py::test[window-generic/aggregations_mixed-] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_PROFILE-client0] >> test.py::test[join-convert_key-] [GOOD] >> test.py::test[join-full_equal_null-off] [SKIPPED] >> test.py::test[join-full_join-] >> test.py::test[join-two_aggrs-default.txt] [GOOD] >> 
test.py::test[join-yql-14847-] >> test.py::test[select-create_tuples-default.txt] [GOOD] >> test.py::test[select-dot_in_alias-default.txt] >> test.py::test[tpch-q15-default.txt] [GOOD] >> test.py::test[join-flatten_columns1-] [GOOD] >> test.py::test[join-full_equal_not_null-] >> test.py::test[tpch-q9-default.txt] >> test.py::test[column_order-join_nosimple-] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] >> test.py::test[column_order-select_subquery-default.txt] [SKIPPED] >> test.py::test[distinct-distinct_columns_after_group-default.txt] >> test.py::test[join-inner_with_select-] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off] [SKIPPED] |99.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/py3test >> test.py::test[window-win_multiaggr_tuple-default.txt] [GOOD] >> test.py::test[join-lookupjoin_bug7646_csee-] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty-] >> test.py::test[select-autoextract_source_value-default.txt] [GOOD] >> test.py::test[select-qualified_all_and_group_by-default.txt] >> test.py::test[join-yql-8131-off] [GOOD] >> test.py::test[json-json_exists/example-] >> test.py::test[window-win_func_rank_by_opt_part-] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key-] >> test.py::test[weak_field-weak_field_opt-] [GOOD] >> test.py::test[window-full/syscolumns-] >> test.py::test[window-generic/aggregations_before_current-] [GOOD] >> test.py::test[window-presort_window_partition_by_table-default.txt] >> test.py::test[window-full/syscolumns-] [SKIPPED] >> test.py::test[window-win_func_first_last_rev-] >> test.py::test[distinct-distinct_one_count-default.txt] [GOOD] >> test.py::test[flatten_by-flatten_and_where-] >> test.py::test[join-star_join_multi-] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix-] >> test.py::test[schema-select_all_inferschema_op-] [GOOD] >> test.py::test[join-full_join-] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix-] [SKIPPED] >> test.py::test[key_filter-ranges-] >> test.py::test[schema-select_operate_with_columns_simple-default.txt] >> test.py::test[join-full_trivial_udf_call-off] [SKIPPED] >> test.py::test[window-generic/aggregations_mixed-] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc-] >> test.py::test[select-dot_in_alias-default.txt] [GOOD] >> test.py::test[select-optional_as_warn-default.txt] >> test.py::test[join-join_with_duplicate_keys_on_sorted-off] [SKIPPED] >> test.py::test[join-left_join_null_column-off] >> test.py::test[distinct-distinct_and_join-] [GOOD] >> test.py::test[hor_join-yield_off-] [SKIPPED] >> test.py::test[join-left_join_null_column-off] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left-] [SKIPPED] >> test.py::test[insert-trivial_literals_multirow-default.txt] [SKIPPED] >> test.py::test[join-lookupjoin_semi-] >> test.py::test[join-yql-14847-] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test.py::test[join-cbo_4tables-] [SKIPPED] |99.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> test.py::test[join-emptyjoin_unused_keys-] [SKIPPED] >> test_clickbench.py::TestClickbench::test_clickbench[4] >> test.py::test[tpch-q9-default.txt] [GOOD] >> test.py::test[type_v3-json-] [SKIPPED] >> 
test.py::test[join-grace_join1-map] [SKIPPED] >> test.py::test[join-inner_grouped_by_expr-] >> test.py::test[type_v3-split-] [SKIPPED] >> test.py::test[distinct-distinct_columns_after_group-default.txt] [GOOD] >> test.py::test[distinct-distinct_window-default.txt] |99.0%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> test.py::test[udf-udaf_short-] >> test.py::test[udf-udaf_short-] [SKIPPED] >> test.py::test[union_all-mix_map_and_project-trivial_map] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows |99.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part6/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[join-lookupjoin_semi_empty-] [GOOD] >> test.py::test[join-lookupjoin_take_skip-] >> test.py::test[join-lookupjoin_take_skip-] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off] [SKIPPED] >> test.py::test[json-json_exists/example-] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key-] [GOOD] >> test.py::test[join-mergejoin_choose_primary-] >> test.py::test[window-win_lead_in_mem-default.txt] >> test.py::test[key_filter-empty_range_over_dynamic-] >> test.py::test[select-qualified_all_and_group_by-default.txt] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt] [SKIPPED] >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt] [SKIPPED] >> test.py::test[select-sample_limit_recordindex-] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt] >> test.py::test[window-win_func_first_last_rev-] [GOOD] |99.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/py3test >> test.py::test[ypath-empty_range-] [SKIPPED] >> test.py::test[schema-select_operate_with_columns_simple-default.txt] [GOOD] >> test.py::test[select-column_labels-default.txt] >> test.py::test[select-optional_as_warn-default.txt] [GOOD] >> test.py::test[select-select_all_group_by_column-] >> test.py::test[join-lookupjoin_semi-] [GOOD] >> test.py::test[join-lookupjoin_semi_1o-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off] >> test.py::test[join-full_equal_not_null-] [GOOD] >> test.py::test[join-join_comp_common_table-] >> test.py::test[join-lookupjoin_semi_1o2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-] >> test.py::test[key_filter-ranges-] [GOOD] >> test.py::test[like-like_clause_escape-default.txt] >> test.py::test[flatten_by-flatten_and_where-] [GOOD] >> test.py::test[flatten_by-flatten_one_field-] >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc-] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] >> test.py::test[window-win_func_part_by_expr_new-default.txt] |99.0%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part15/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[window-presort_window_partition_by_table-default.txt] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all-] >> test.py::test[distinct-distinct_window-default.txt] [GOOD] >> test.py::test[join-inner_grouped_by_expr-] [GOOD] >> test.py::test[join-inner_grouped_by_expr-off] [SKIPPED] >> test.py::test[expr-constraints_of-] >> test.py::test[join-inner_trivial_from_concat-off] [SKIPPED] >> test.py::test[join-inner_with_order-] >> test.py::test[key_filter-empty_range_over_dynamic-] [GOOD] >> test.py::test[like-ilike_clause-default.txt] >> test.py::test[join-mergejoin_choose_primary-] [GOOD] >> test.py::test[join-mergejoin_force_no_sorted-] >> test.py::test[select-column_labels-default.txt] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt] >> test.py::test[union_all-mix_map_and_project-trivial_map] [GOOD] >> test.py::test[view-all_from_view-] >> test.py::test[key_filter-dependent_value-default.txt] [GOOD] >> test.py::test[key_filter-uuid-] [SKIPPED] >> test.py::test[library-package-] [SKIPPED] >> test.py::test[limit-insert_with_limit-] [SKIPPED] >> test.py::test[lineage-flatten_by-] [SKIPPED] >> test.py::test[lineage-grouping_sets-] [SKIPPED] >> test.py::test[lineage-select_join-default.txt] [SKIPPED] |99.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/py3test >> test.py::test[ypath-limit_with_range-default.txt] [SKIPPED] >> test.py::test[flatten_by-flatten_one_field-] [GOOD] >> test.py::test[multicluster-insert_fill-] [SKIPPED] >> test.py::test[optimizers-reduce_with_aux_sort_column-] [SKIPPED] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt] [SKIPPED] >> test.py::test[hor_join-max_outtables-] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc-] [SKIPPED] >> test.py::test[hor_join-out_sampling-] [SKIPPED] >> test.py::test[order_by-assume_with_filter-] >> test.py::test[insert-keepmeta_nonstrict_fail-] >> test.py::test[select-select_all_group_by_column-] [GOOD] >> test.py::test[order_by-assume_with_filter-] [SKIPPED] >> test.py::test[order_by-literal-] [SKIPPED] >> test.py::test[order_by-literal_take_zero_sort-] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc-] >> test.py::test[insert-keepmeta_nonstrict_fail-] [SKIPPED] >> test.py::test[insert-keepmeta_with_read_udf_fail-] [SKIPPED] >> test.py::test[insert-replace_ordered_by_key-default.txt] >> test.py::test[like-like_clause_escape-default.txt] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt] [SKIPPED] >> test.py::test[weak_field-hor_join_with_mix_weak_access-] >> test.py::test[window-win_func_part_by_expr_new-default.txt] [GOOD] >> test.py::test[window-win_func_rank_by_part-] >> test.py::test[insert-replace_ordered_by_key-default.txt] [SKIPPED] >> test.py::test[select-shift_columns-default.txt] >> test.py::test[order_by-native_desc_sort_calc-] [SKIPPED] >> test.py::test[order_by-order_by_dynum-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test.py::test[insert_monotonic-keep_unique-] [SKIPPED] >> test_clickbench.py::TestClickbench::test_clickbench[6] >> test.py::test[insert_monotonic-to_empty-] [SKIPPED] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/py3test >> 
test.py::test[window-win_func_first_last_rev-] [GOOD] >> test.py::test[join-group_compact_by-] >> test.py::test[expr-constraints_of-] [GOOD] >> test.py::test[expr-empty_iterator2-] >> test.py::test[join-lookupjoin_semi_2o-] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort-off] [SKIPPED] >> test.py::test[join-mergejoin_semi_composite_to_inner-off] [SKIPPED] >> test.py::test[join-mergejoin_semi_to_inner-off] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-] >> test.py::test[join-join_comp_common_table-] [GOOD] >> test.py::test[join-lookupjoin_semi_2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_with_cache-] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord-off] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-] [SKIPPED] >> test.py::test[join-mergejoin_big_primary-] >> test.py::test[select-corr_name_in_select_seq-default.txt] [GOOD] >> test.py::test[select-sampleselect-] >> test.py::test[join-inner_with_order-] [GOOD] >> test.py::test[join-join_comp_map_table-off] [SKIPPED] >> test.py::test[like-ilike_clause-default.txt] [GOOD] >> test.py::test[limit-limit-] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc-] >> test.py::test[join-join_no_correlation_in_order_by-] >> test.py::test[view-all_from_view-] [GOOD] >> test.py::test[view-trivial_view-] >> test.py::test[limit-yql-8046_empty_sorted_desc-] [SKIPPED] >> test.py::test[lineage-group_by_asstruct_key-default.txt] [SKIPPED] >> test.py::test[lineage-list_literal4-default.txt] >> test.py::test[join-mergejoin_force_no_sorted-] [GOOD] >> test.py::test[lineage-list_literal4-default.txt] [SKIPPED] >> test.py::test[lineage-select_all-default.txt] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt] >> test.py::test[join-mergejoin_saves_output_sort_cross-] >> test.py::test[lineage-select_field_filter-default.txt] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other-] [SKIPPED] >> test.py::test[order_by-SortByOneFieldDesc-] >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] >> test_stats_mode.py::TestStatsMode::test_mode[v1-STATS_MODE_PROFILE-client0] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc-] >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] >> test.py::test[window-win_func_aggr_with_qualified_all-] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns-] >> test.py::test[join-group_compact_by-] [GOOD] >> test.py::test[join-inner_on_key_only-] |99.1%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part2/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[weak_field-hor_join_with_mix_weak_access-] [GOOD] >> test.py::test[window-current/session_incompat_sort-] >> test.py::test[expr-empty_iterator2-] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt] >> test.py::test[window-win_func_rank_by_part-] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt] [GOOD] >> test.py::test[window-win_func_special-] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted-] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on-] [SKIPPED] >> test.py::test[lineage-process-default.txt] >> test.py::test[select-sampleselect-] [GOOD] >> test.py::test[join-mergejoin_big_primary-] [GOOD] >> test.py::test[join-opt_on_opt_side-] >> test.py::test[table_range-each_with_non_existing-] >> test.py::test[join-mergejoin_choose_primary_with_retry-] >> test.py::test[lineage-process-default.txt] [SKIPPED] >> test.py::test[lineage-select_field_order_by-default.txt] [SKIPPED] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt] >> test.py::test[table_range-each_with_non_existing-] [SKIPPED] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt] [SKIPPED] >> test.py::test[tpch-q1-default.txt] >> test.py::test[view-trivial_view-] [GOOD] >> test.py::test[view-view_with_lambda-] >> test.py::test[optimizers-sort_constraint_in_left-] >> test.py::test[join-join_no_correlation_in_order_by-] [GOOD] >> test.py::test[join-left_only_with_other-] >> test.py::test[select-shift_columns-default.txt] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc-] [GOOD] >> test.py::test[order_by-assume_over_input-] [SKIPPED] >> test.py::test[order_by-order_by_tablepath_column-] >> test.py::test[join-mergejoin_saves_output_sort_cross-] [GOOD] >> test.py::test[join-premap_common_inner-] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] >> test.py::test[select-table_funcs_spec-default.txt] >> test.py::test[join-premap_common_inner-] [SKIPPED] >> test.py::test[order_by-order_by_num_key_and_subkey_desc-] [GOOD] >> test.py::test[join-premap_map_cross-off] [SKIPPED] >> test.py::test[join-pullup_random-off] >> test.py::test[pg-join_using_tables2-default.txt] >> test.py::test[select-table_funcs_spec-default.txt] [SKIPPED] >> test.py::test[join-pullup_random-off] [SKIPPED] >> test.py::test[join-yql_465-off] >> test.py::test[flatten_by-flatten_expr_struct-default.txt] [GOOD] >> test.py::test[select-trivial_between-default.txt] >> test.py::test[pg-join_using_tables2-default.txt] [SKIPPED] >> test.py::test[hor_join-fuse_multi_usage-outlimit] >> test.py::test[join-yql_465-off] [SKIPPED] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> test.py::test[pg-select_starref1-default.txt] [SKIPPED] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns-] [GOOD] >> test.py::test[hor_join-fuse_multi_usage-outlimit] [SKIPPED] >> test.py::test[window-win_func_rank_by_opt_all-] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt] >> test.py::test[pg-tpcds-q90-default.txt] >> test.py::test[in-in_tablesource_on_raw_list-] [SKIPPED] >> test.py::test[in-in_with_opt_tuple-default.txt] >> test.py::test[pg-tpcds-q90-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q08-default.txt] >> test.py::test[pg-tpch-q08-default.txt] [SKIPPED] >> test.py::test[produce-discard_process_with_lambda-default.txt] [SKIPPED] >> test.py::test[produce-discard_reduce_lambda-] [SKIPPED] >> test.py::test[produce-process_sorted_multi_out-] 
[SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple-] [SKIPPED] >> test.py::test[ql_filter-integer_single_equals-] >> test.py::test[join-inner_on_key_only-] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off] >> test.py::test[ql_filter-integer_single_equals-] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o-off] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off] >> test.py::test[result_types-data-default.txt] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off] [SKIPPED] >> test.py::test[view-view_with_lambda-] [GOOD] >> test.py::test[weak_field-weak_field_join-] >> test.py::test[result_types-singular-default.txt] [SKIPPED] >> test.py::test[join-lookupjoin_semi-off] >> test.py::test[window-current/session_incompat_sort-] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt] [SKIPPED] >> test.py::test[join-opt_on_opt_side-] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group-] >> test.py::test[window-win_func_special-] [GOOD] >> test.py::test[join-lookupjoin_semi-off] [SKIPPED] >> test.py::test[sampling-bind_default-default.txt] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys-] >> test.py::test[join-mergejoin_choose_primary_with_retry-] [GOOD] >> test.py::test[join-mergejoin_force_align3-off] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename-off] [SKIPPED] >> test.py::test[window-full/aggregations-] >> test.py::test[join-no_empty_join_for_dyn-] >> test.py::test[sampling-bind_join_left-default.txt] [SKIPPED] >> test.py::test[window-win_over_few_partitions_other-] >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test.py::test[schema-select_all_inferschema_limit-] [SKIPPED] >> test.py::test[schema-select_all_inferschema_range_empty_fail-] [SKIPPED] >> test.py::test[select-deep_udf_call-] >> test.py::test[tpch-q1-default.txt] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma-] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test.py::test[join-left_only_with_other-] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-] >> test.py::test[optimizers-sort_constraint_in_left-] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown-] >> TDqPqRdReadActorTests::Backpressure [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown-] [SKIPPED] >> test.py::test[optimizers-yql-3455_filter_sorted-] >> test.py::test[order_by-order_by_tablepath_column-] [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 >> test.py::test[pg-tpcds-q07-default.txt] >> test.py::test[pg-tpcds-q07-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q21-default.txt] [SKIPPED] >> YdbWorkloadTopic::Default_RunFull [GOOD] >> YdbWorkloadTopic::Init_Clean >> test.py::test[pg-tpcds-q26-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q36-default.txt] >> test.py::test[pg-tpcds-q36-default.txt] [SKIPPED] >> test.py::test[produce-reduce_with_assume_in_subquery-] >> test.py::test[key_filter-extend_over_map_with_same_schema-default.txt] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt] [GOOD] >> test.py::test[lineage-window_member_struct-default.txt] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys-] [GOOD] >> test.py::test[optimizers-aggregate_over_aggregate-] >> test.py::test[insert-keepmeta_view_fail-] [SKIPPED] >> test.py::test[insert-select_after_replace_unwrap-default.txt] [SKIPPED] >> test.py::test[weak_field-weak_field_join-] [GOOD] >> test.py::test[window-distinct_over_window_full_frames-] [SKIPPED] >> 
test.py::test[window-row_number_to_map_multiple-default.txt] >> test.py::test[insert-select_relabel-default.txt] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn-] [GOOD] >> test.py::test[join-aggr_diff_order-default.txt] >> test.py::test[join-mapjoin_early_rewrite-] >> test.py::test[join-opt_on_opt_side_with_group-] [GOOD] >> test.py::test[join-pullup_extend-] [SKIPPED] >> test.py::test[type_v3-ignore_v3_pragma-] [GOOD] >> test.py::test[join-pullup_left-off] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left-off] [SKIPPED] >> test.py::test[join-right_trivial-] >> test.py::test[join-premap_common_inner_both_sides-] >> test.py::test[select-deep_udf_call-] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs_reuse_args_fail-] [SKIPPED] >> test.py::test[udf-udaf_lambda-default.txt] >> test.py::test[select-exists_true-default.txt] >> test.py::test[join-premap_common_inner_both_sides-] [SKIPPED] >> test.py::test[join-premap_merge_inner-off] [SKIPPED] >> test.py::test[join-premap_merge_with_remap-off] [SKIPPED] >> test.py::test[join-pullup_left_semi-] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> test.py::test[select-trivial_between-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt] >> test.py::test[window-full/aggregations-] [GOOD] >> test.py::test[window-generic/aggregations_after_current-] >> YdbWorkloadTopic::Init_Clean [GOOD] >> YdbWorkloadTopic::Clean_Without_Init >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_many-] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Results] [GOOD] >> test.py::test[ytflow-select--Results] >> test.py::test[join-mapjoin_unused_keys-] |99.1%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part11/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[ytflow-select--Results] [SKIPPED] >> test.py::test[window-win_over_few_partitions_other-] [GOOD] >> test.py::test[ypath-complex-default.txt] >> YdbWorkloadTopic::Clean_Without_Init [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] [GOOD] >> YdbWorkloadTopic::Double_Init >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] >> test_clickbench.py::TestClickbench::test_clickbench[9] >> test.py::test[sampling-bind_join_right-default.txt-Results] [SKIPPED] >> test.py::test[sampling-join_left_sample-default.txt-Results] >> test.py::test[window-win_func_rank_by_opt_all-] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted-] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row-] [SKIPPED] >> test.py::test[order_by-extract_members_over_sort_desc-] >> test.py::test[order_by-extract_members_over_sort_desc-] [SKIPPED] >> test.py::test[order_by-order_by_tuple-default.txt] >> YdbWorkloadTopic::Double_Init [GOOD] >> YdbWorkloadTopic::Read_Statistics >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_NONE-client0] >> test.py::test[udf-udaf_lambda-default.txt] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt] [SKIPPED] >> test.py::test[window-current/session-] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions >> test.py::test[window-win_func_aggr_4func-] >> test.py::test[optimizers-aggregate_over_aggregate-] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting-] [SKIPPED] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt] >> test.py::test[join-pullup_left_semi-] [GOOD] >> test.py::test[join-yql-16011-] [SKIPPED] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda-] [SKIPPED] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map-] >> test.py::test[json-jsondocument/insert-] [SKIPPED] >> test.py::test[key_filter-complex-default.txt] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery-] [GOOD] >> test.py::test[ql_filter-integer_select_other-] [SKIPPED] >> test.py::test[sampling-orderedjoin_left_sample-default.txt] >> test.py::test[select-exists_true-default.txt] [GOOD] >> TDqPqRdReadActorTests::MetadataFields >> test.py::test[select-logical_ops-default.txt] >> test.py::test[sampling-orderedjoin_left_sample-default.txt] [SKIPPED] >> test.py::test[sampling-reduce-] >> test.py::test[sampling-reduce-] [SKIPPED] >> test.py::test[schema-skip_complex_type-] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[ytflow-select--Results] [SKIPPED] >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> test.py::test[join-right_trivial-] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off] [SKIPPED] >> test.py::test[join-yql-14829_left-off] [SKIPPED] >> test.py::test[ypath-complex-default.txt] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt] [GOOD] >> test.py::test[order_by-ordered_fill-] [SKIPPED] >> test.py::test[join-yql-4275-off] [SKIPPED] >> 
test.py::test[json-json_value/example-] >> test.py::test[join-mapjoin_unused_keys-] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off] >> test.py::test[order_by-sort_decimals-] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/py3test >> test.py::test[window-win_func_rank_by_opt_all-] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct-off] [SKIPPED] >> test.py::test[window-generic/aggregations_after_current-] [GOOD] >> test.py::test[join-mergejoin_force_align1-] [SKIPPED] >> test.py::test[window-generic/session-] >> test.py::test[join-aggr_diff_order-default.txt] [GOOD] >> test.py::test[join-mergejoin_force_align2-off] [SKIPPED] >> test.py::test[join-bush_in_in_in-] >> test.py::test[join-mergejoin_saves_output_sort_cross-off] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_left-] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key-default.txt] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail-] [SKIPPED] >> test.py::test[table_range-range_over_filter-] >> test.py::test[join-mapjoin_early_rewrite-] [GOOD] >> test.py::test[window-win_func_aggr_4func-] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt] >> test.py::test[join-mapjoin_early_rewrite_star-off] >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test.py::test[table_range-range_over_filter-] [SKIPPED] >> test.py::test[tpch-q10-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> test.py::test[join-mapjoin_early_rewrite_star-off] [SKIPPED] >> test.py::test[join-mergejoin_big_primary-off] >> test.py::test[window-current/session-] [GOOD] >> test.py::test[window-full/aggregations_leadlag-] >> test.py::test[join-mergejoin_big_primary-off] [SKIPPED] >> test.py::test[key_filter-complex-default.txt] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt] >> test.py::test[join-mergejoin_narrows_output_sort-off] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch-] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map-] [GOOD] >> test.py::test[order_by-native_desc_publish-] [SKIPPED] >> test.py::test[order_by-order_by_list_of_strings-] |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/py3test >> test_ydb_backup.py::TestDatabaseBackupRestore::test_database_backup_restore [GOOD] >> test.py::test[schema-skip_complex_type-] [GOOD] >> test.py::test[select-logical_ops-default.txt] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict-] [SKIPPED] >> test.py::test[select-anon_clash-] [SKIPPED] >> test.py::test[select-bit_ops-default.txt] >> test.py::test[table_range-concat_with_view-] >> test.py::test[table_range-concat_with_view-] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf-] [SKIPPED] >> test.py::test[table_range-range_over_like-] [SKIPPED] >> test.py::test[table_range-range_over_regexp-] [SKIPPED] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> test.py::test[tpch-q13-default.txt] >> test.py::test[json-json_value/example-] [GOOD] >> TDqPqReadActorTest::TestReadFromTopic >> test.py::test[key_filter-between_with_key_filter-] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD] >> test.py::test[join-bush_in_in_in-] [GOOD] >> test.py::test[join-count_bans-off] [SKIPPED] >> 
test.py::test[join-filter_joined-off] [SKIPPED] >> test.py::test[join-grace_join1-grace] [SKIPPED] >> test.py::test[window-generic/session-] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt] >> test.py::test[join-inner_with_select-off] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off] >> test.py::test[order_by-sort_decimals-] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt] [SKIPPED] >> test.py::test[pg-aggregate_minus_zero-] [SKIPPED] >> test.py::test[pg-join_using_tables3-default.txt] >> test.py::test[join-join_key_cmp_udf-off] [SKIPPED] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt] >> test.py::test[join-join_without_column-] >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/py3test >> test.py::test[ypath-complex-default.txt] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] >> test.py::test[pg-join_using_tables3-default.txt] [SKIPPED] >> test.py::test[pg-select_alias_partial-default.txt] [SKIPPED] >> test.py::test[pg-select_subquery2-default.txt] [SKIPPED] >> test.py::test[pg-table_func-default.txt] [SKIPPED] >> test.py::test[pg-wide_sort-] >> test.py::test[join-mergejoin_sorts_output_for_sort_left-] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_right-] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt] [GOOD] >> test.py::test[key_filter-empty_range-] >> test.py::test[window-full/aggregations_leadlag-] [GOOD] >> test.py::test[window-full/leadlag_compact-] >> test.py::test[window-win_func_auto_arg-default.txt] [GOOD] >> YdbWorkloadTopic::Read_Statistics [GOOD] >> YdbWorkloadTopic::Write_Statistics >> test.py::test[order_by-order_by_list_of_strings-] [GOOD] >> test.py::test[window-win_func_in_lib-] [SKIPPED] >> test.py::test[order_by-warn_offset_wo_sort-] [SKIPPED] |99.1%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[window-win_func_over_group_by_list_names-] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch-] [GOOD] >> test.py::test[pg-aggregate_combine-] [SKIPPED] >> test.py::test[pg-all_data-] [SKIPPED] >> test.py::test[join-nested_semi_join-] >> test.py::test[tpch-q10-default.txt] [GOOD] >> test.py::test[pg-wide_top_sort-] >> test.py::test[udf-python_struct-] [SKIPPED] >> test.py::test[udf-two_regexps-] >> test.py::test[select-bit_ops-default.txt] [GOOD] >> test.py::test[select-if-default.txt] >> test.py::test[key_filter-between_with_key_filter-] [GOOD] >> test.py::test[key_filter-contains_tuples-default.txt] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> test.py::test[join-join_without_column-] [GOOD] >> test.py::test[pg-wide_sort-] [GOOD] >> test.py::test[join-mapjoin_early_rewrite-off] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_sequence-] >> test.py::test[pg-tpcds-q18-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q53-default.txt] >> test.py::test[pg-tpcds-q53-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q65-default.txt] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> test.py::test[window-win_func_spec_with_part-] >> test.py::test[pg-tpcds-q65-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q81-default.txt] >> test.py::test[tpch-q13-default.txt] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt] [SKIPPED] >> test.py::test[tpch-q14-default.txt] >> test.py::test[pg-tpcds-q91-default.txt] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] >> test.py::test[pg-tpcds-q91-default.txt] [SKIPPED] >> test.py::test[window-win_func_over_group_by_list_names-] [GOOD] >> test.py::test[join-nested_semi_join-] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix-] >> test.py::test[pg-wide_top_sort-] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt] >> test.py::test[join-nopushdown_filter_with_depends_on-off] >> test.py::test[pg-tpcds-q96-default.txt] [SKIPPED] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> TDqPqReadActorTest::TestReadFromTopic [GOOD] >> test.py::test[select-if-default.txt] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q13-default.txt] >> test_workload.py::TestYdbKvWorkload::test[column] >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt] >> test.py::test[join-nopushdown_filter_with_depends_on-off] [SKIPPED] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] >> test.py::test[join-premap_map_inner-off] [SKIPPED] >> test.py::test[join-premap_merge_inner-] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate] >> test.py::test[join-mergejoin_sorts_output_for_sort_right-] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names-] >> test.py::test[pg-tpcds-q13-default.txt] [SKIPPED] >> test.py::test[select-missing_with_nonpersist-] >> TDqPqReadActorTest::TestReadFromTopicFromNow >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate] [SKIPPED] >> test.py::test[pg-tpcds-q99-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q01-default.txt] [SKIPPED] >> 
test.py::test[pg-tpch-q16-default.txt] >> test.py::test[pg-tpcds-q85-default.txt] >> test.py::test[join-star_join_mirror-] >> test.py::test[pg-tpcds-q85-default.txt] [SKIPPED] >> test.py::test[key_filter-empty_range-] [GOOD] >> test.py::test[pg-tpch-q12-default.txt] >> test.py::test[pg-tpch-q16-default.txt] [SKIPPED] >> test.py::test[produce-process_lambda_opt_args-default.txt] [SKIPPED] >> test.py::test[produce-process_rows_sorted_desc_multi_out-] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out-] >> test.py::test[pg-tpch-q12-default.txt] [SKIPPED] >> test.py::test[produce-process_row_and_columns-default.txt] [SKIPPED] >> test.py::test[produce-process_sorted_desc_multi_out-] [SKIPPED] >> test.py::test[produce-reduce_with_flat_python_stream-] [SKIPPED] >> test.py::test[produce-reduce_with_python_having-] >> test.py::test[limit-dynamic_limit-] [SKIPPED] >> test.py::test[produce-process_with_lambda-default.txt] >> test.py::test[limit-dynamic_sort_limit-] [SKIPPED] >> test.py::test[limit-empty_read_after_limit-default.txt] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce-] [SKIPPED] >> test.py::test[produce-reduce_with_python_having-] [SKIPPED] >> test.py::test[optimizers-length_over_merge_fs_multiusage-] >> KqpFederatedQuery::ExecuteScriptWithLargeFile [GOOD] >> KqpFederatedQuery::ExecuteScriptWithThinFile >> test.py::test[join-mapjoin_early_rewrite_sequence-] [GOOD] >> test.py::test[produce-reduce_with_python_row_repack-] [SKIPPED] >> test.py::test[ql_filter-integer_optional_null-] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-] >> test.py::test[ql_filter-integer_optional_null-] [SKIPPED] >> test.py::test[sampling-subquery_multiple_sample-default.txt] >> test.py::test[key_filter-contains_tuples-default.txt] [GOOD] >> test.py::test[limit-sort_calc_limit-] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt] [SKIPPED] >> test.py::test[window-full/leadlag_compact-] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt] >> test.py::test[sampling-subquery_multiple_sample-default.txt] [SKIPPED] >> test.py::test[udf-two_regexps-] [GOOD] >> test.py::test[schema-limit_directread-] >> test.py::test[lineage-scalar_context-] [SKIPPED] >> test.py::test[udf-udf_call_with_group_and_limit-] >> test.py::test[optimizers-keep_sort_with_renames-] [SKIPPED] >> test.py::test[optimizers-length_over_merge-] >> test.py::test[schema-limit_directread-] [SKIPPED] >> test.py::test[schema-select_all-row_spec_extra_sort] >> test.py::test[window-win_func_spec_with_part-] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> test.py::test[join-mergejoin_with_different_key_names-] [GOOD] >> test.py::test[join-no_empty_join_for_dyn-off] [SKIPPED] >> test.py::test[select-missing_with_nonpersist-] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group-off] [SKIPPED] >> test.py::test[select-optional_pull-] >> test.py::test[join-premap_context_dep-] >> test.py::test[join-premap_context_dep-] [SKIPPED] >> test.py::test[join-pullup_random-] >> test.py::test[select-optional_pull-] [SKIPPED] >> test.py::test[select-type_assert-default.txt] >> 
TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> test.py::test[join-star_join_mirror-] [GOOD] >> test.py::test[join-star_join_semionly-] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix-] [GOOD] >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> test.py::test[window-win_func_with_struct_access_full_access-default.txt] >> YdbWorkloadTopic::Write_Statistics [GOOD] |99.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/py3test >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> YdbWorkloadTopic::ReadWrite_Statistics >> TDqPqReadActorTest::ReadWithFreeSpace >> test.py::test[schema-select_all-row_spec_extra_sort] [GOOD] >> test.py::test[schema-user_schema_missing_column-] >> test.py::test[tpch-q14-default.txt] [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] >> test.py::test[tpch-q8-default.txt] >> test.py::test[optimizers-length_over_merge_fs_multiusage-] [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi-] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-] [GOOD] >> test.py::test[join-mapjoin_sharded-default.txt] >> test.py::test[window-row_number_no_part_from_subq-default.txt] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other-] >> test.py::test[window-win_multiaggr_list-default.txt] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic-] [SKIPPED] >> test.py::test[produce-process_with_lambda-default.txt] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] >> test_diff_processing.py::TestTpcdsDiffProcessing::test_tpcds[CheckCanonicalPolicy.ERROR] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] >> test.py::test[udf-udf_call_with_group_and_limit-] [GOOD] >> test.py::test[view-standalone_view_lambda-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_combine-] >> test.py::test[optimizers-length_over_merge-] [GOOD] >> test.py::test[optimizers-sorted_sql_in-] [SKIPPED] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep-] [SKIPPED] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_NONE-client0] [GOOD] >> test.py::test[schema-user_schema_missing_column-] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt] >> test.py::test[join-pullup_random-] [GOOD] >> test.py::test[join-selfjoin_on_sorted-] >> test.py::test[join-star_join_semionly-] [GOOD] >> test.py::test[join-yql-8980-] >> test.py::test[select-type_assert-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt] >> test.py::test[join-mapjoin_sharded-default.txt] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested-off] >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-off] [SKIPPED] >> test.py::test[join-premap_common_inner_filter-off] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt] [GOOD] >> test.py::test[ytflow-select_over_static-] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-] [SKIPPED] >> 
test.py::test[join-starjoin_unused_keys-] >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> test.py::test[optimizers-unused_columns_group_one_of_multi-] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt] >> TDqPqReadActorTest::TestSaveLoadPqRead >> test.py::test[window-win_func_lead_lag_worm_with_part_other-] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine-] [GOOD] >> test.py::test[window-win_func_part_by_expr-] >> test.py::test[weak_field-weak_field_aggregation-] >> test.py::test[produce-process_with_lambda_outstream-default.txt] [GOOD] >> test.py::test[produce-reduce_multi_in-empty] >> test.py::test[produce-reduce_multi_in-empty] [SKIPPED] >> test.py::test[ql_filter-integer_many_left-] >> test.py::test[ql_filter-integer_many_left-] [SKIPPED] >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test.py::test[sampling-bind_expr_udf-] >> test_clickbench.py::TestClickbench::test_clickbench[15] >> test.py::test[join-selfjoin_on_sorted-] [GOOD] >> test.py::test[tpch-q8-default.txt] [GOOD] >> test.py::test[join-star_join_inners_premap-off] [SKIPPED] >> test.py::test[join-star_join_multi-off] >> test.py::test[join-star_join_multi-off] [SKIPPED] >> test.py::test[type_v3-singulars-] [SKIPPED] >> test.py::test[join-strict_keys-] >> test.py::test[udf-python_script_from_file-] [SKIPPED] >> test.py::test[view-file_inner-] >> test.py::test[view-file_inner-] [SKIPPED] >> test.py::test[view-view_with_library-] [SKIPPED] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs-] [GOOD] >> test.py::test[order_by-literal_empty_list_sort-] [SKIPPED] >> test.py::test[join-strict_keys-] [SKIPPED] >> test.py::test[join-three_equalities-off] >> test.py::test[join-yql-8980-] [GOOD] |99.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/py3test >> test.py::test[ypath-direct_read_from_dynamic-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_map-] >> test.py::test[key_filter-datetime-default.txt] >> test.py::test[order_by-order_by_tablerow_column-] >> test.py::test[join-three_equalities-off] [SKIPPED] >> test.py::test[join-yql-4275-] >> test.py::test[key_filter-datetime-default.txt] [SKIPPED] >> test.py::test[key_filter-nile_pred-] >> test.py::test[simple_columns-simple_columns_join_all-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt] >> test.py::test[select-scalar_subquery_with_star-default.txt] [GOOD] >> test.py::test[select-trivial_having-default.txt] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt] [GOOD] >> test.py::test[weak_field-weak_field_aggregation-] [GOOD] >> test.py::test[window-empty/aggregations-] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props-] [SKIPPED] >> test.py::test[order_by-SortByOneField-] >> test.py::test[sampling-bind_expr_udf-] [GOOD] >> test.py::test[window-win_func_part_by_expr-] [GOOD] >> test.py::test[window-win_over_few_partitions-] >> test.py::test[join-starjoin_unused_keys-] [GOOD] >> test.py::test[json-json_query/example-] >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> YdbWorkloadTopic::ReadWrite_Statistics [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] >> YdbWorkloadTopic::Write_Statistics_UseTx |99.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] 
>> test.py::test[sampling-sort-default.txt] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size] [SKIPPED] >> test.py::test[sampling-topsort-default.txt] [SKIPPED] >> test.py::test[schema-insert-row_spec] |99.2%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/py3test >> test.py::test[schema-insert-row_spec] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_map-] [GOOD] >> test.py::test[schema-insert_sorted-schema] [SKIPPED] >> test.py::test[window-rank/plain-] >> test.py::test[schema-select_all-row_spec_diff_sort2] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/py3test >> test.py::test[ytflow-select_over_static-] [SKIPPED] >> test.py::test[order_by-order_by_tablerow_column-] [GOOD] >> test.py::test[order_by-sort_with_take_limit-] [SKIPPED] >> test.py::test[pg-nulls_native-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt] [GOOD] >> test.py::test[select-trivial_having-default.txt] [GOOD] >> test.py::test[pg-select_starref2-default.txt] [SKIPPED] >> test.py::test[pg-select_subquery-default.txt] >> test.py::test[select-use_cluster-default.txt] >> test.py::test[tpch-q12-default.txt] [SKIPPED] >> test.py::test[pg-select_subquery-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q12-default.txt] [SKIPPED] >> test.py::test[tpch-q6-default.txt] >> test.py::test[pg-tpcds-q34-default.txt] >> test.py::test[key_filter-nile_pred-] [GOOD] >> test.py::test[like-regexp_clause-] >> test.py::test[pg-tpcds-q34-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q59-default.txt] >> test.py::test[join-yql-4275-] [GOOD] >> test.py::test[key_filter-is_null_or_data-] >> test.py::test[json-json_query/example-] [GOOD] >> test.py::test[key_filter-mixed_opt_bounds-] [SKIPPED] >> test.py::test[pg-tpcds-q59-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q94-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q22-default.txt] [SKIPPED] >> test.py::test[key_filter-multiusage-] [SKIPPED] >> test.py::test[key_filter-part_key_over_dynamic-] >> test.py::test[produce-process_trivial_as_struct-default.txt] >> test.py::test[window-win_over_few_partitions-] [GOOD] >> test.py::test[order_by-SortByOneField-] [GOOD] >> test.py::test[window-yql-14738-default.txt] >> test.py::test[order_by-assume_with_transform_desc-] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit-] [SKIPPED] >> test.py::test[order_by-order_by_tuple_expr-default.txt] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2] [GOOD] >> test.py::test[schema-select_field-schema] >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_BASIC-client0] >> test.py::test[window-empty/aggregations-] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank-] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] >> test.py::test[key_filter-is_null_or_data-] [GOOD] >> test.py::test[key_filter-tzdate-] [SKIPPED] >> test.py::test[key_filter-utf8_with_legacy-] >> test.py::test[produce-process_trivial_as_struct-default.txt] [GOOD] >> test.py::test[produce-process_with_udf-default.txt] |99.2%| [TM] {RESULT} ydb/tests/sql/py3test >> test.py::test[key_filter-part_key_over_dynamic-] [GOOD] >> 
test.py::test[lambda-lambda_udf-] >> test.py::test[window-rank/plain-] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] >> test.py::test[window-win_func_order_by_udf_empty_rank-] [GOOD] >> test.py::test[schema-select_field-schema] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt] >> test_workload.py::TestYdbLogWorkload::test[row] >> test.py::test[tpch-q6-default.txt] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt] >> test.py::test[like-regexp_clause-] [GOOD] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt] >> test.py::test[select-use_cluster-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt] >> test.py::test[order_by-order_by_tuple_expr-default.txt] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt] [SKIPPED] >> test.py::test[pg-in_mixed-] [SKIPPED] >> test.py::test[pg-tpcds-q14-default.txt] >> test.py::test[produce-process_with_udf-default.txt] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume-] [SKIPPED] >> test.py::test[pg-tpcds-q14-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q25-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap-] [SKIPPED] >> test.py::test[produce-reduce_with_flat_lambda-default.txt] >> test.py::test[pg-tpcds-q61-default.txt] [SKIPPED] >> test.py::test[key_filter-utf8_with_legacy-] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter-] [SKIPPED] >> test.py::test[key_filter-yql-8663-dedup_ranges-] >> test.py::test[pg-tpcds-q92-default.txt] [SKIPPED] >> test.py::test[produce-process_multi_in_trivial_lambda-] >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] >> test.py::test[window-yql-14738-default.txt] [GOOD] >> test.py::test[lambda-lambda_udf-] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt] >> YdbWorkloadTopic::Write_Statistics_UseTx [GOOD] >> YdbWorkloadTopic::Full_Statistics_UseTx |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/py3test >> test.py::test[window-win_func_order_by_udf_empty_rank-] [GOOD] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] >> test.py::test[union_all-mix_map_and_read-default.txt] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt] >> test.py::test[window-row_number_to_map_noncompact-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt] >> test.py::test[window-win_by_all_aggregate-] >> test.py::test[limit-empty_sort_desc_after_limit-default.txt] [GOOD] >> test.py::test[lineage-if_struct-default.txt] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range-] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort-] [SKIPPED] >> test.py::test[order_by-order_by_expr_over_sorted_table-] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/py3test >> test.py::test[window-yql-14738-default.txt] [GOOD] |99.2%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part16/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[select-dict_with_few_keys-default.txt] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt] [GOOD] >> test.py::test[select-literal_bool-default.txt] >> test.py::test[limit-many_top_sorts-default.txt] >> test.py::test[produce-reduce_with_flat_lambda-default.txt] [GOOD] >> test.py::test[sampling-reduce-with_premap] [SKIPPED] >> test.py::test[sampling-yql-14664_deps-default.txt] [SKIPPED] >> test.py::test[schema-user_schema_no_infer-] >> test.py::test[key_filter-yql-8663-dedup_ranges-] [GOOD] >> test.py::test[library-package_override-] [SKIPPED] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] >> test.py::test[produce-process_multi_in_trivial_lambda-] [GOOD] >> test.py::test[produce-reduce_all_opt-default.txt] [SKIPPED] >> test.py::test[produce-reduce_multi_in_keytuple-] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref-] [SKIPPED] >> test.py::test[produce-reduce_subfields-sorted] [SKIPPED] >> test.py::test[ql_filter-integer_optional-] [SKIPPED] >> test.py::test[sampling-map-] [SKIPPED] >> test.py::test[schema-append_to_desc-] >> test.py::test[schema-append_to_desc-] [SKIPPED] >> test.py::test[schema-select_all-yamred_dsv_raw] [SKIPPED] >> test.py::test[schema-select_all_inferschema2-] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates-default.txt] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables-] [SKIPPED] >> test.py::test[table_range-range_slash-] [SKIPPED] >> test.py::test[udf-named_args_for_script-] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2-] [SKIPPED] >> test.py::test[union-union_trivial-default.txt] >> test.py::test[order_by-order_by_expr_over_sorted_table-] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt] >> test.py::test[union_all-union_all_trivial-default.txt] [GOOD] >> test.py::test[view-secure-] [SKIPPED] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt] >> test.py::test[schema-user_schema_no_infer-] [GOOD] >> test.py::test[schema-user_schema_patch_columns-] >> test.py::test[limit-many_top_sorts-default.txt] [GOOD] >> test.py::test[lineage-topsort-default.txt] [SKIPPED] >> test.py::test[lineage-unused_columns-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps-] [SKIPPED] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers-] [SKIPPED] >> test.py::test[order_by-order_by_mul_columns-default.txt] >> test.py::test[select-literal_bool-default.txt] [GOOD] >> test.py::test[select-sum_to_string-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] >> test.py::test[window-win_by_all_aggregate-] [GOOD] >> test.py::test[window-win_with_cur_row-] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt] [GOOD] >> test.py::test[limit-yql-9617_empty_lambda-default.txt] >> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED] |99.2%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/py3test >> test.py::test[union-union_trivial-default.txt] [GOOD] >> test.py::test[weak_field-weak_field_to_yson-] >> test_result_limits.py::TestResultLimits::test_many_rows >> test.py::test[order_by-order_by_mul_columns-default.txt] [GOOD] >> test.py::test[order_by-order_by_udf_duo-] >> test.py::test[order_by-order_by_missing_project_column-default.txt] [GOOD] >> 
test.py::test[order_by-order_by_udf-] >> test.py::test[select-sum_to_string-default.txt] [GOOD] >> test.py::test[select-table_content_with_tmp_folder-] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt] [GOOD] >> test.py::test[window-full/aggregations_compact-] >> test.py::test[schema-user_schema_patch_columns-] [GOOD] >> test.py::test[select-create_structures-default.txt] >> test.py::test[window-win_with_cur_row-] [GOOD] >> test.py::test[window-yql-15636-default.txt] [SKIPPED] >> test.py::test[ytflow-select-] [SKIPPED] >> YdbWorkloadTopic::Full_Statistics_UseTx [GOOD] >> YdbWorkloadTopic::WriteInTx >> test.py::test[schema-select_all_inferschema2-] [GOOD] >> test.py::test[schema-select_reordered-default.txt] |99.2%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest >> test.py::test[limit-yql-9617_empty_lambda-default.txt] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda-] [SKIPPED] >> test.py::test[lineage-join_as_struct-default.txt] [SKIPPED] >> test.py::test[lineage-list_literal2-default.txt] [SKIPPED] >> test.py::test[lineage-pullup_rename-] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt] [SKIPPED] >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] >> test.py::test[lineage-select_nested_table_row-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[23] >> test.py::test[lineage-select_nested_table_row-default.txt] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_same_keys-default.txt] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts-] >> test.py::test[weak_field-weak_field_to_yson-] [GOOD] >> test.py::test[optimizers-test_lmap_opts-] [SKIPPED] >> test.py::test[window-full/noncompact_with_nulls-] >> test.py::test[optimizers-yql-12620_stage_multiuse-] >> test.py::test[order_by-order_by_udf_duo-] [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> test.py::test[pg-select_where-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q11-default.txt] >> test.py::test[pg-tpcds-q11-default.txt] [SKIPPED] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_BASIC-client0] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt] >> test.py::test[pg-tpcds-q16-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q28-default.txt] >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> test.py::test[pg-tpcds-q28-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q37-default.txt] >> test.py::test[pg-tpcds-q37-default.txt] [SKIPPED] >> test.py::test[order_by-order_by_udf-] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt] >> test.py::test[pg-tpcds-q06-default.txt] >> test.py::test[pg-tpcds-q06-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q38-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q09-default.txt] >> test.py::test[pg-tpcds-q52-default.txt] [SKIPPED] >> test.py::test[select-table_content_with_tmp_folder-] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q10-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_single_thread-] [SKIPPED] >> test.py::test[schema-select_reordered-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt] >> test.py::test[pg-tpcds-q87-default.txt] >> test.py::test[produce-fuse_reduces_diff_sets-] [SKIPPED] >> 
test.py::test[schema-user_schema_mix1-] >> test.py::test[produce-native_desc_reduce_with_presort-] [SKIPPED] >> test.py::test[pg-tpcds-q87-default.txt] [SKIPPED] >> test.py::test[produce-process_multi_out_bad_count_fail-] [SKIPPED] >> test.py::test[pg-tpcds-q97-default.txt] >> test.py::test[produce-process_streaming_inline_bash-default.txt] >> test.py::test[pg-tpcds-q97-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q07-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q17-default.txt] >> test.py::test[pg-tpch-q17-default.txt] [SKIPPED] >> test.py::test[pg_duplicated-duplicated_rowspec-] [SKIPPED] >> test.py::test[pragma-config_exec-] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-] [SKIPPED] >> test.py::test[ql_filter-integer_eval-] >> test.py::test[select-create_structures-default.txt] [GOOD] >> test.py::test[select-select_all_from_concat_anon-default.txt] [SKIPPED] >> test.py::test[select-simple_struct_field_access-] >> test.py::test[ql_filter-integer_eval-] [SKIPPED] >> test.py::test[ql_filter-integer_members-] [SKIPPED] >> test.py::test[sampling-bind_small_rate-default.txt] [SKIPPED] >> test.py::test[schema-def_values_job-] >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> test.py::test[window-full/noncompact_with_nulls-] [GOOD] >> test.py::test[window-mixed/aggregations-] |99.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/py3test >> test.py::test[ytflow-select-] [SKIPPED] >> test.py::test[optimizers-yql-12620_stage_multiuse-] [GOOD] >> test.py::test[optimizers-yql-17413-topsort-] >> test.py::test[window-full/aggregations_compact-] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt] >> test_workload.py::TestYdbWorkload::test[column] |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/py3test >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] >> test.py::test[produce-process_streaming_inline_bash-default.txt] [GOOD] >> test.py::test[produce-reduce_all_multi_in-default.txt] [SKIPPED] >> test.py::test[produce-reduce_with_python-] [SKIPPED] >> test.py::test[ql_filter-integer_many_right-] [SKIPPED] >> test.py::test[result_types-pg-default.txt] [SKIPPED] >> test.py::test[sampling-bind_expr-default.txt] >> test.py::test[sampling-bind_expr-default.txt] [SKIPPED] >> test.py::test[sampling-sample-default.txt] >> test.py::test[schema-user_schema_mix1-] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] >> test.py::test[optimizers-yql-17413-topsort-] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract-] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.NO] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt] [SKIPPED] >> test.py::test[tpch-q19-default.txt] >> test.py::test[select-simple_struct_field_access-] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt] >> test.py::test[window-mixed/aggregations-] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part-] >> 
test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] >> test.py::test[window-win_func_with_struct_access-default.txt] [GOOD] >> test.py::test[schema-def_values_job-] [GOOD] >> test.py::test[schema-other-] [SKIPPED] >> test.py::test[schema-select_all-read_schema] >> test.py::test[sampling-sample-default.txt] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt] [SKIPPED] >> test.py::test[sampling-system_sampling-] [SKIPPED] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.WARNING] [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] >> test.py::test[schema-other_job-] [SKIPPED] >> test.py::test[schema-select_all-row_spec_diff_sort] >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[27] >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark |99.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/py3test >> test.py::test[window-win_func_with_struct_access-default.txt] [GOOD] |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/py3test >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_without-default.txt] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt] [SKIPPED] >> test.py::test[tpch-q19-default.txt] [GOOD] >> test.py::test[udf-python_script-] [SKIPPED] >> test.py::test[udf-regexp_udf-] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_filter_combine-] >> test.py::test[tpch-q17-default.txt] |99.3%| [TA] $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract-] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps-] >> test.py::test[window-win_func_aggr_4func_no_part-] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort] [GOOD] >> test.py::test[select-exists_false-default.txt] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_diff_processing.py::TestClickbenchDiffProcessing::test_clickbench[CheckCanonicalPolicy.ERROR] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt] [GOOD] >> test.py::test[select-opt_list_access-default.txt] >> test.py::test[schema-select_all-read_schema] [GOOD] >> test.py::test[schema-select_field-row_spec] >> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] |99.3%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part4/test-results/pytest/{meta.json ... 
results_accumulator.log} >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> test.py::test[weak_field-optimize_weak_fields_filter_combine-] [GOOD] >> test.py::test[window-current/ansi_current_with_win-] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_FULL-client0] >> test.py::test[select-opt_list_access-default.txt] [GOOD] >> test.py::test[select-uncorrelated_subqueries-] >> test.py::test[window-win_func_first_last_over_nonopt-default.txt] [GOOD] >> test.py::test[window-win_inline_spec-default.txt] >> test.py::test[tpch-q17-default.txt] [GOOD] >> test.py::test[tpch-q18-default.txt] >> test.py::test[select-exists_false-default.txt] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt] |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/py3test >> test.py::test[schema-select_field-row_spec] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt] [SKIPPED] >> test.py::test[select-literal_negative-default.txt] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps-] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt] |99.3%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/py3test >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] >> test.py::test[window-current/ansi_current_with_win-] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag-] >> test.py::test[tpch-q18-default.txt] [GOOD] >> test.py::test[tpch-q3-default.txt] >> test.py::test[window-win_inline_spec-default.txt] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test.py::test[select-one_unlabeled_column-default.txt] [GOOD] >> test.py::test[select-struct_access_without_table_name-] >> test.py::test[select-literal_negative-default.txt] [GOOD] >> test.py::test[select-multi_source_issue-default.txt] >> test.py::test[order_by-order_by_tuple_and_member-default.txt] [GOOD] >> test.py::test[pg-select_from_columns-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q19-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q20-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q42-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q63-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q73-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q74-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q80-default.txt] [SKIPPED] >> test.py::test[pg-tpcds-q88-default.txt] [SKIPPED] >> test.py::test[select-uncorrelated_subqueries-] [GOOD] >> test.py::test[tpch-q16-default.txt] >> test.py::test[pg-tpch-q18-default.txt] [SKIPPED] >> test.py::test[pg-tpch-q21-default.txt] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull-] [SKIPPED] >> test.py::test[produce-process_and_filter-default.txt] [SKIPPED] >> test.py::test[produce-process_rows_sorted_multi_out-] [SKIPPED] >> test.py::test[produce-reduce_all_field_subset-] [SKIPPED] >> test.py::test[produce-reduce_lambda_list_mem-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] |99.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/py3test >> test.py::test[window-win_inline_spec-default.txt] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag-] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt] |99.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> 
test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] >> test.py::test[select-multi_source_issue-default.txt] [GOOD] >> test.py::test[select-select_all_filtered-default.txt] >> test.py::test[tpch-q3-default.txt] [GOOD] >> test.py::test[tpch-q4-default.txt] [SKIPPED] >> test.py::test[type_v3-decimal_yt_llvm-] [SKIPPED] >> test.py::test[type_v3-insert_struct_v3_wo_native-] [SKIPPED] >> test.py::test[udf-udaf-] [SKIPPED] >> test.py::test[weak_field-weak_field_esc_string-] >> test.py::test[tpch-q16-default.txt] [GOOD] >> test.py::test[tpch-q2-default.txt] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype-] [SKIPPED] |99.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/py3test >> test.py::test[produce-reduce_with_trivial_remaps-] [SKIPPED] >> test.py::test[sampling-insert-] [SKIPPED] >> test.py::test[sampling-join_right_sample-default.txt] [SKIPPED] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> test.py::test[sampling-mapjoin_left_sample-default.txt] [SKIPPED] >> test.py::test[sampling-subquery_default-default.txt] [SKIPPED] >> test.py::test[sampling-subquery_mapjoin-default.txt] [SKIPPED] >> test.py::test[schema-copy-yamred_dsv_raw] [SKIPPED] >> test.py::test[select-struct_access_without_table_name-] [GOOD] >> test.py::test[schema-select_all-schema] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_disable-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> YdbWorkloadTopic::WriteInTx [GOOD] >> YdbWorkloadTopic::WriteProducesToAllPartitionsEvenly >> test.py::test[window-win_func_auto_arg_two_sort-default.txt] [GOOD] |99.4%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] |99.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/py3test >> test.py::test[window-win_func_auto_arg_two_sort-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt] >> test.py::test[select-select_all_filtered-default.txt] [GOOD] >> test.py::test[select-substring-default.txt] >> test.py::test[weak_field-weak_field_esc_string-] [GOOD] >> test.py::test[weak_field-weak_field_join_condition-] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] |99.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/py3test >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_FULL-client0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_workload.py::TestYdbWorkload::test [GOOD] >> test.py::test[schema-select_all-schema] [GOOD] >> test.py::test[schema-select_all_inferschema_range-] [SKIPPED] >> test.py::test[schema-select_yamr_fields-] [SKIPPED] >> test.py::test[schema-skip_complex_type2-] >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] >> test.py::test[tpch-q2-default.txt] [GOOD] >> test.py::test[tpch-q5-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test.py::test[weak_field-weak_field_join_condition-] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset-] [SKIPPED] >> test.py::test[window-current/ansi_current_mixed-] >> test_clickbench.py::TestClickbench::test_clickbench[38] |99.4%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/test-results/pytest/{meta.json ... 
results_accumulator.log} >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt] [GOOD] >> test.py::test[tpch-q22-default.txt] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test.py::test[select-substring-default.txt] [GOOD] >> test.py::test[select-where_cast-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] >> test.py::test[tpch-q5-default.txt] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield] >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test.py::test[schema-skip_complex_type2-] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> test.py::test[tpch-q22-default.txt] [GOOD] >> test.py::test[tpch-q7-default.txt] >> TPqWriterTest::TestWriteToTopicMultiBatch >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] |99.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/py3test >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> TPqWriterTest::TestDeferredWriteToTopic >> test.py::test[select-where_cast-default.txt] [GOOD] >> test.py::test[table_range-range_tables_with_view-] [SKIPPED] >> test.py::test[table_range-range_with_view-] [SKIPPED] >> test.py::test[type_v3-mixed_with_columns-] >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> TPqWriterTest::TestCheckpoints >> test.py::test[window-current/ansi_current_mixed-] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt] >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> test.py::test[schema-user_schema_bind-default.txt] [GOOD] >> test.py::test[schema-user_schema_with_sort-] |99.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/py3test >> TPqWriterTest::TestCheckpoints [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield] [GOOD] >> test.py::test[union_all-infer_3-default.txt] |99.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> test.py::test[tpch-q7-default.txt] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-] |99.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/py3test >> test.py::test[window-win_func_on_cloned_source-default.txt] [GOOD] >> test.py::test[window-win_func_rank_by_all-] |99.4%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::test[type_v3-mixed_with_columns-] [GOOD] >> test.py::test[type_v3-non_strict-] [SKIPPED] >> test.py::test[view-file_outer_library-] [SKIPPED] >> test.py::test[view-system_udf-] >> test.py::test[union_all-infer_3-default.txt] [GOOD] >> test.py::test[view-file_outer-] [SKIPPED] >> test.py::test[weak_field-weak_field_data-] |99.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/py3test >> test.py::test[schema-user_schema_with_sort-] [GOOD] >> test.py::test[select-dict_lookup_column_names-default.txt] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-05-05T10:03:08.223997Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7500900059856923818:2053], metadatafields: , partitions: 666 2025-05-05T10:03:08.408486Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-05-05T10:03:08.408513Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-05-05T10:03:08.408517Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7500900059856923818:2053]) 2025-05-05T10:03:08.408536Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7500900059856923824:2048] 2025-05-05T10:03:08.408717Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7500900059856923819:2054] 2025-05-05T10:03:08.408746Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7500900059856923819:2054], partIds: 666 cookie 1 2025-05-05T10:03:08.408915Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7500900059856923819:2054], cookie 1 2025-05-05T10:03:08.408921Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-05-05T10:03:08.408925Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-05-05T10:03:08.408933Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7500900059856923821:2056], generation 1 2025-05-05T10:03:08.408943Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7500900059856923821:2056], connection id 1 partitions offsets (666 / ), 2025-05-05T10:03:08.409041Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7500900059856923821:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-05-05T10:03:08.409100Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7500900059856923821:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T10:03:08.409283Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7500900059856923821:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T10:03:08.409294Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
TEvMessageBatch NextOffset 1 2025-05-05T10:03:08.409297Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-05-05T10:03:08.409325Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-05-05T10:03:08.409395Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-05-05T10:03:08.409398Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-05-05T10:03:08.412146Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-05-05T10:03:08.412186Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7500900059856923821:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2025-05-05T10:03:08.412189Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7500900059856923824:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7500900059856923821:2056] generation 1 2025-05-05T10:03:08.611605Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7500900058980500122:2053], metadatafields: , partitions: 666 2025-05-05T10:03:08.811973Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-05-05T10:03:08.811998Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-05-05T10:03:08.812003Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7500900058980500122:2053]) 2025-05-05T10:03:08.812016Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [3:7500900058980500128:2048] 2025-05-05T10:03:08.812129Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [3:7500900058980500123:2054] 2025-05-05T10:03:08.812137Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [3:7500900058980500123:2054], partIds: 666 cookie 1 2025-05-05T10:03:08.812204Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Received TEvCoordinatorResult from [3:7500900058980500123:2054], cookie 1 2025-05-05T10:03:08.812208Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-05-05T10:03:08.812210Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-05-05T10:03:08.812217Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [3:7500900058980500125:2056], generation 1 2025-05-05T10:03:08.812226Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [3:7500900058980500125:2056], connection id 1 partitions offsets (666 / ), 2025-05-05T10:03:08.812299Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [3:7500900058980500125:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-05-05T10:03:08.812347Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7500900058980500125:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T10:03:08.812485Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7500900058980500125:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T10:03:08.812494Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-05-05T10:03:08.812496Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-05-05T10:03:08.812547Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-05-05T10:03:08.812618Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-05-05T10:03:08.812621Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-05-05T10:03:08.812698Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [3:7500900058980500125:2056], reason Disconnected, cookie 999 2025-05-05T10:03:08.812722Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2025-05-05T10:03:08.812775Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [3:7500900058980500125:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T10:03:08.812838Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [3:7500900058980500125:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-05-05T10:03:08.812843Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
TEvMessageBatch NextOffset 3 2025-05-05T10:03:08.812888Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-05-05T10:03:08.812899Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 3 2025-05-05T10:03:08.812900Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-05-05T10:03:08.815006Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-05-05T10:03:08.815042Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 3 NotifyCA 2 [3:7500900058980500125:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=3 has pending data 2025-05-05T10:03:08.815045Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7500900058980500128:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [3:7500900058980500125:2056] generation 1 2025-05-05T10:03:09.060280Z node 5 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [5:7500900064333930311:2053], metadat ... Close timeout: 0.000000s 2025-05-05T10:05:08.671642Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-05-05T10:05:08.671646Z :INFO: [local] [local] [c1ac46da-2d0c03ec-a943b4e9-fd4d626b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T10:05:08.671652Z :NOTICE: [local] [local] [c1ac46da-2d0c03ec-a943b4e9-fd4d626b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-05-05T10:05:08.672320Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [7d7af7e0-4276982e-2f893b5d-b77a2322|920a842e-4c839b0b-2ee76c17-86f89d68_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-05-05T10:05:08.672329Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [7d7af7e0-4276982e-2f893b5d-b77a2322|920a842e-4c839b0b-2ee76c17-86f89d68_0] PartitionId [0] Generation [1] Write session will now close 2025-05-05T10:05:08.672360Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [7d7af7e0-4276982e-2f893b5d-b77a2322|920a842e-4c839b0b-2ee76c17-86f89d68_0] PartitionId [0] Generation [1] Write session: aborting 2025-05-05T10:05:08.672484Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [7d7af7e0-4276982e-2f893b5d-b77a2322|920a842e-4c839b0b-2ee76c17-86f89d68_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-05-05T10:05:08.672488Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [7d7af7e0-4276982e-2f893b5d-b77a2322|920a842e-4c839b0b-2ee76c17-86f89d68_0] PartitionId [0] Generation [1] Write session: destroy 2025-05-05T10:05:08.876767Z node 53 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Load state: { SourceId: "7d7af7e0-4276982e-2f893b5d-b77a2322" ConfirmedSeqNo: 3 EgressBytes: 3 } 2025-05-05T10:05:08.876825Z node 53 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 2. Checkpoint: 0. Finished: 0 2025-05-05T10:05:08.881441Z node 53 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 4 2025-05-05T10:05:08.881457Z node 53 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 5 2025-05-05T10:05:08.882928Z :INFO: [local] OnFederationDiscovery fall back to single mode, database=local E0505 10:05:08.883064498 28527 dns_resolver.cc:162] no server name supplied in dns URI E0505 10:05:08.883123311 28527 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// [] [] Start federated write session to database '' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "" DbInfos: [ { path: "local" endpoint: "localhost:15713" status: AVAILABLE weight: 100 } ] }2025-05-05T10:05:08.883489Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Starting read session 2025-05-05T10:05:08.883501Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Starting single session 2025-05-05T10:05:08.883543Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: try to update token 2025-05-05T10:05:08.883803Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-05T10:05:08.883810Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-05T10:05:08.883815Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Reconnecting session to cluster in 0.000000s 2025-05-05T10:05:08.883817Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Start write session. 
Will connect to nodeId: 0 2025-05-05T10:05:08.885271Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: write to message_group: 7d7af7e0-4276982e-2f893b5d-b77a2322 2025-05-05T10:05:08.885296Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Successfully connected. Initializing session 2025-05-05T10:05:08.885302Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: send init request: init_request { path: "Checkpoints" producer_id: "7d7af7e0-4276982e-2f893b5d-b77a2322" message_group_id: "7d7af7e0-4276982e-2f893b5d-b77a2322" } 2025-05-05T10:05:08.885306Z :TRACE: [local] TRACE_EVENT InitRequest 2025-05-05T10:05:08.885376Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: OnWriteDone gRpcStatusCode: 0 2025-05-05T10:05:08.886393Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Server session id: test_client_1_22_1924897975035959479_v1 2025-05-05T10:05:08.886403Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-05-05T10:05:08.886454Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-05-05T10:05:08.887837Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-05-05T10:05:08.888730Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Got ReadResponse, serverBytesSize = 785, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428015 2025-05-05T10:05:08.888766Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428015 2025-05-05T10:05:08.888873Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-05-05T10:05:08.888900Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Returning serverBytesSize = 785 to budget 2025-05-05T10:05:08.888907Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] In ContinueReadingDataImpl, ReadSizeBudget = 785, ReadSizeServerDelta = 52428015 2025-05-05T10:05:08.889071Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-05-05T10:05:08.889096Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-05-05T10:05:08.889105Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 1} (1-1) 2025-05-05T10:05:08.889113Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 2} (2-2) 2025-05-05T10:05:08.889118Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (3-3) 2025-05-05T10:05:08.889127Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (4-4) 2025-05-05T10:05:08.889193Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] The application data is transferred to the client. 
Number of messages 5, size 5 bytes 2025-05-05T10:05:08.889209Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Returning serverBytesSize = 0 to budget 2025-05-05T10:05:08.889213Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Closing read session. Close timeout: 0.000000s 2025-05-05T10:05:08.889234Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-05-05T10:05:08.889257Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T10:05:08.889279Z :NOTICE: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-05-05T10:05:08.889287Z :DEBUG: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] [] Abort session to cluster 2025-05-05T10:05:08.889425Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Closing read session. Close timeout: 0.000000s 2025-05-05T10:05:08.889430Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-05-05T10:05:08.889432Z :INFO: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T10:05:08.889437Z :NOTICE: [local] [local] [b2e9f6c7-a6b4a3b6-4b16753b-fda3df86] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-05-05T10:05:08.890225Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: close. Timeout 0.000000s 2025-05-05T10:05:08.890235Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session will now close 2025-05-05T10:05:08.890256Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: aborting 2025-05-05T10:05:08.890423Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: gracefully shut down, all writes complete 2025-05-05T10:05:08.890436Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-05-05T10:05:08.890474Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1746439508890 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-05-05T10:05:08.890480Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-05-05T10:05:08.890484Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session is aborting and will not restart 2025-05-05T10:05:08.890509Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [7d7af7e0-4276982e-2f893b5d-b77a2322] Write session: destroy 2025-05-05T10:05:09.147298Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-05-05T10:05:09.152099Z node 54 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-05-05T10:05:09.152144Z node 54 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "d8adcab7-474b4a5e-f8103b2a-3aa93abc" } >> test.py::test[type_v3-ignore_v3_hint-] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort-] |99.5%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest >> test.py::test[window-win_func_rank_by_all-] [GOOD] >> test.py::test[window-win_fuse_window-default.txt] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic] [SKIPPED] |99.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/py3test >> test.py::test[ypath-empty_range-dynamic] [SKIPPED] >> test.py::test[weak_field-weak_field_data-] [GOOD] >> test.py::test[weak_field-weak_field_long_fields-] >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] >> test.py::test[view-system_udf-] [GOOD] >> test.py::test[view-view_with_lambda_process-] >> test.py::test[select-dict_lookup_column_names-default.txt] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt] >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_PROFILE-client0] >> YdbWorkloadTopic::WriteProducesToAllPartitionsEvenly [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Run >> test.py::test[type_v3-mergejoin_with_sort-] [GOOD] >> test.py::test[udf-udf-] [SKIPPED] >> test.py::test[weak_field-weak_field_wrong_types_fail-] [SKIPPED] >> test.py::test[window-current/aggregations_leadlag-] |99.5%| [TA] {RESULT} $(B)/ydb/tests/functional/ydb_cli/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::test[weak_field-weak_field_long_fields-] [GOOD] >> test.py::test[window-current/aggregations-] >> test.py::test[select-dot_name_subrequest-default.txt] [GOOD] >> test.py::test[select-host_count-] [SKIPPED] >> test.py::test[select-struct_members-default.txt] >> test.py::test[view-view_with_lambda_process-] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson-] >> test.py::test[window-current/aggregations-] [GOOD] >> test.py::test[window-current/session_aliases-] >> test.py::test[window-current/aggregations_leadlag-] [GOOD] >> test.py::test[window-distinct_over_window-] [SKIPPED] >> test.py::test[window-win_func_first_last_with_part-] >> test.py::test[select-struct_members-default.txt] [GOOD] >> test.py::test[select-trivial_where-one] >> test.py::test[weak_field-weak_field_esc_yson-] [GOOD] >> test.py::test[weak_field-weak_field_rest-] >> test.py::test[window-win_func_first_last_with_part-] [GOOD] >> test.py::test[window-win_multiaggr_library-] [SKIPPED] |99.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/py3test >> test.py::test[window-current/session_aliases-] [GOOD] >> test.py::test[window-generic/aggregations_include_current-] |99.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/py3test |99.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/py3test >> test.py::test[window-win_multiaggr_library-] [SKIPPED] >> test.py::test[select-trivial_where-one] [GOOD] >> test.py::test[select-unlabeled-] >> test.py::test[weak_field-weak_field_rest-] [GOOD] >> test.py::test[window-empty/aggregations_leadlag-] >> KqpFederatedQuery::ExecuteScriptWithThinFile [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericQuery >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> test.py::test[window-generic/aggregations_include_current-] [GOOD] >> test.py::test[window-leading/aggregations-] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericQuery [GOOD] >> 
KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericScript >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_PROFILE-client0] [GOOD] >> test.py::test[window-empty/aggregations_leadlag-] [GOOD] >> test.py::test[window-full/noncompact_with_tablerow-] >> test.py::test[select-unlabeled-] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_base-default.txt] >> test.py::test[window-leading/aggregations-] [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalDataSourceGenericScript [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericQuery >> test.py::test[window-full/noncompact_with_tablerow-] [GOOD] >> test.py::test[window-full/session_incompat_sort-] |99.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/py3test >> test.py::test[window-leading/aggregations-] [GOOD] |99.5%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/py3test >> test.py::test[simple_columns-simple_columns_base-default.txt] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt] [SKIPPED] >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt] >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericQuery [GOOD] >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericScript >> test.py::test[window-full/session_incompat_sort-] [GOOD] >> test.py::test[window-win_multiaggr-default.txt] |99.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.6%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test >> KqpFederatedQuery::CreateTableAsSelectFromExternalTableGenericScript [GOOD] >> KqpFederatedQuery::OverridePlannerDefaults >> test.py::test[simple_columns-simple_columns_qualified_all_and_group_by-default.txt] [GOOD] >> test.py::test[table_range-merge_non_strict-] [SKIPPED] >> test.py::test[table_range-range_over_desc-] [SKIPPED] >> test.py::test[type_v3-append_diff_flags-] [SKIPPED] >> test.py::test[type_v3-decimal_yt-] [SKIPPED] >> test.py::test[type_v3-ignore_v3_hint-tag_opt] >> KqpFederatedQuery::OverridePlannerDefaults [GOOD] >> KqpFederatedQuery::TestReadEmptyFileWithCsvFormat >> test.py::test[window-win_multiaggr-default.txt] [GOOD] >> test.py::test[ypath-multi_range-default.txt] >> KqpFederatedQuery::TestReadEmptyFileWithCsvFormat [GOOD] >> KqpFederatedQuery::TestWildcardValidation >> test.py::test[type_v3-ignore_v3_hint-tag_opt] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs-] [SKIPPED] >> test.py::test[union-union_multiin-] >> test.py::test[ypath-multi_range-default.txt] [GOOD] >> KqpFederatedQuery::TestWildcardValidation [GOOD] >> KqpFederatedQuery::TestSecretsExistingValidation |99.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/py3test >> test.py::test[ypath-multi_range-default.txt] [GOOD] |99.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/py3test >> test.py::test[union-union_multiin-] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> KqpFederatedQuery::TestSecretsExistingValidation [GOOD] >> KqpFederatedQuery::TestOlapToS3Insert >> test.py::test[union_all-union_all_fields-default.txt] [GOOD] >> 
test.py::test[union_all-union_all_with_parenthesis-default.txt] >> KqpFederatedQuery::TestOlapToS3Insert [GOOD] >> KqpFederatedQuery::TestReadLargeParquetFile |99.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/py3test >> test_stats_mode.py::TestStatsMode::test_mode[v2-STATS_MODE_PROFILE-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ac2/ydb/tests/fq/plans/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000ac2/ydb/tests/fq/plans/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=4174768) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 4177002 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test.py::test[union_all-union_all_with_parenthesis-default.txt] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme-] |99.6%| [TM] {RESULT} ydb/tests/fq/plans/py3test >> test.py::test[weak_field-weak_field_infer_scheme-] [GOOD] >> test.py::test[window-current/ansi_current-] >> test_workload.py::TestYdbLogWorkload::test[row] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] |99.6%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part15/test-results/pytest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000b83/ydb/tests/fq/restarts/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000b83/ydb/tests/fq/restarts/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=4125245) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 4127548 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |99.6%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/py3test |99.6%| [TM] {RESULT} ydb/tests/fq/restarts/py3test >> test.py::test[window-current/ansi_current-] [GOOD] >> test.py::test[window-full/session_compact-] |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part2/test-results/pytest/{meta.json ... results_accumulator.log} >> KqpFederatedQuery::TestReadLargeParquetFile [GOOD] >> KqpFederatedQuery::TestLocalReadLargeParquetFile |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part6/test-results/pytest/{meta.json ... results_accumulator.log} >> test.py::test[window-full/session_compact-] [GOOD] >> test.py::test[window-win_func_into_udf-] >> test.py::test[window-win_func_into_udf-] [GOOD] >> test.py::test[window-yql-14479-default.txt] [SKIPPED] >> test.py::test[ypath-limit_with_key-default.txt] [SKIPPED] |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part11/test-results/pytest/{meta.json ... results_accumulator.log} |99.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/py3test >> test.py::test[ypath-limit_with_key-default.txt] [SKIPPED] |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/test-results/pytest/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part16/test-results/pytest/{meta.json ... results_accumulator.log} >> KqpFederatedQuery::TestLocalReadLargeParquetFile [GOOD] >> KqpFederatedSchemeTest::ExternalTableDdl >> YdbWorkloadTransferTopicToTable::Default_Run [GOOD] >> YdbWorkloadTransferTopicToTable::Default_Init_Clean >> KqpFederatedSchemeTest::ExternalTableDdl [GOOD] >> KqpFederatedSchemeTest::InvalidDropForExternalTableWithAuth |99.7%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/py3test |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/test-results/pytest/{meta.json ... 
results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/test-results/pytest/{meta.json ... results_accumulator.log} >> KqpFederatedSchemeTest::InvalidDropForExternalTableWithAuth [GOOD] >> KqpFederatedSchemeTest::ExternalTableDdlLocationValidation >> KqpFederatedSchemeTest::ExternalTableDdlLocationValidation [GOOD] >> KqpS3PlanTest::S3Source >> KqpS3PlanTest::S3Source [GOOD] >> KqpS3PlanTest::S3Sink >> KqpS3PlanTest::S3Sink [GOOD] >> KqpS3PlanTest::S3CreateTableAsSelect >> KqpS3PlanTest::S3CreateTableAsSelect [GOOD] >> KqpS3PlanTest::S3Insert >> KqpS3PlanTest::S3Insert [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/s3/unittest >> KqpS3PlanTest::S3Insert [GOOD] Test command err: Trying to start YDB, gRPC: 29608, MsgBus: 3724 2025-05-05T10:02:20.548411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500899852551806090:2217];send_to=[0:7307199536658146131:7762515]; 2025-05-05T10:02:20.548459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ad9/r3tmp/tmpv3q6df/pdisk_1.dat 2025-05-05T10:02:20.618972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29608, node 1 2025-05-05T10:02:20.635117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T10:02:20.635133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T10:02:20.635135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T10:02:20.635183Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-05-05T10:02:20.647180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T10:02:20.647212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T10:02:20.651314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3724 TClient is connected to server localhost:3724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-05-05T10:02:20.715947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T10:02:20.727026Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T10:02:20.943426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500899852551806551:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:20.943453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:20.983262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T10:02:20.985045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T10:02:20.994715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2025-05-05T10:02:20.995280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-05-05T10:02:20.995558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-05-05T10:02:21.155561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500899856846774138:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:21.155584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:21.155637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500899856846774143:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:21.156479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:2, at schemeshard: 72057594046644480 2025-05-05T10:02:21.158219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500899856846774145:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2025-05-05T10:02:21.236358Z node 1 :TX_PROXY ERROR: Actor# [1:7500899856846774185:2516] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-05T10:02:21.731658Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746439341773, txId: 281474976715682] shutting down Trying to start YDB, gRPC: 28600, MsgBus: 61104 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/000ad9/r3tmp/tmp5oS6KW/pdisk_1.dat 2025-05-05T10:02:22.150831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-05-05T10:02:22.159126Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28600, node 2 2025-05-05T10:02:22.184382Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T10:02:22.184394Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T10:02:22.184396Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T10:02:22.184443Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61104 TClient is connected to server localhost:61104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T10:02:22.248593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T10:02:22.248639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T10:02:22.249257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-05-05T10:02:22.251151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-05-05T10:02:22.258232Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-05-05T10:02:22.527622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500899859315496313:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:22.527649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:22.528656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T10:02:22.530958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-05-05T10:02:22.558734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500899859315496363:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:22.558791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:22.559352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500899859315496368:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:02:22.564151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-05-05T10:02:22.567194Z node 2 :FLAT_TX_SCHEME ... tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.112817Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.113477Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.113758Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.114382Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.114705Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.115279Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.115588Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.116229Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.116471Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.117184Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.117372Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.118130Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.118305Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.119171Z node 53 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.119492Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.120151Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.120539Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.121025Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.121455Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.121898Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.122399Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.122767Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.123476Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.123779Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.124413Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.124691Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; 2025-05-05T10:05:56.125719Z node 53 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:214;event=finished_tx;tx_id=281474976715661; Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olap_source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["data (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/olap_source","E-Rows":"No estimate","Table":"olap_source","ReadColumns":["data"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node 
Type":"TableFullScan","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"FirstMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"Bytes":{"Count":1,"Sum":15,"Max":15,"Min":15}},"Name":"3","Push":{"LastMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"FirstMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"PauseMessageMs":{"Count":1,"Sum":31,"Max":31,"Min":31},"WaitTimeUs":{"Count":42,"Sum":1267664,"Max":39149,"Min":19569},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":42,"Min":31}}}],"MaxMemoryUsage":{"Count":42,"Sum":44040192,"Max":1048576,"Min":1048576},"IngressBytes":{"Count":1,"Sum":21,"Max":21,"Min":21},"Tasks":42,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"FinishedTasks":42,"IngressRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/olap_source","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":21,"Max":21,"Min":21}}],"BaseTimeMs":1746439556174,"OutputBytes":{"Count":1,"Sum":15,"Max":15,"Min":15},"CpuTimeUs":{"Count":42,"Sum":3466,"Max":224,"Min":60},"Ingress":[{"Pop":{},"External":{"PartitionCount":1,"LastMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"ExternalRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ExternalBytes":{"Count":1,"Sum":21,"Max":21,"Min":21},"FirstMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42}},"Name":"CS","Ingress":{"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"FirstMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"Bytes":{"Count":1,"Sum":21,"Max":21,"Min":21}},"Push":{}}],"UpdateTimeMs":43}}],"Node Type":"Map","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[],"ExternalDataSource":"insert_data_sink","Extension":".parquet","Name":"Write insert_data_sink","SinkType":"s3"}],"Node 
Type":"Stage-Sink","Stats":{"UseLlvm":"undefined","MaxMemoryUsage":{"Count":42,"Sum":44040192,"Max":1048576,"Min":1048576},"DurationUs":{"Count":1,"Sum":8000,"Max":8000,"Min":8000},"InputBytes":{"Count":1,"Sum":15,"Max":15,"Min":15},"Tasks":42,"FinishedTasks":42,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":43,"Max":43,"Min":43},"FirstMessageMs":{"Count":1,"Sum":43,"Max":43,"Min":43},"Bytes":{"Count":1,"Sum":660,"Max":660,"Min":660}},"Name":"S3Sink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":42,"Sum":1514,"Max":50,"Min":32},"ActiveMessageMs":{"Count":42,"Max":50,"Min":32},"FirstMessageMs":{"Count":42,"Sum":1507,"Max":43,"Min":32},"Bytes":{"Count":1,"Sum":328,"Max":328,"Min":328},"ActiveTimeUs":{"Count":1,"Sum":7000,"Max":7000,"Min":7000}},"Push":{"LastMessageMs":{"Count":1,"Sum":43,"Max":43,"Min":43},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":43,"Max":43,"Min":43},"FirstMessageMs":{"Count":1,"Sum":43,"Max":43,"Min":43},"Bytes":{"Count":1,"Sum":660,"Max":660,"Min":660},"PauseMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"WaitTimeUs":{"Count":42,"Sum":1442711,"Max":42725,"Min":29517},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":43,"Min":42}}}],"InputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":1,"StageDurationUs":18000,"EgressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"BaseTimeMs":1746439556174,"EgressBytes":{"Count":1,"Sum":328,"Max":328,"Min":328},"CpuTimeUs":{"Count":42,"Sum":1942,"Max":297,"Min":29},"UpdateTimeMs":50,"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"FirstMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"Bytes":{"Count":1,"Sum":15,"Max":15,"Min":15}},"Name":"1","Push":{"LastMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"FirstMessageMs":{"Count":1,"Sum":42,"Max":42,"Min":42},"Bytes":{"Count":1,"Sum":15,"Max":15,"Min":15},"PauseMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"WaitTimeUs":{"Count":42,"Sum":1442362,"Max":42349,"Min":29514},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":42,"Min":11}}}]}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":25359,"CpuTimeUs":24518},"ProcessCpuTimeUs":124,"TotalDurationUs":82522,"ResourcePoolId":"default","QueuedTimeUs":98},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"ExternalDataSource":"insert_data_sink","Extension":".parquet","A-SelfCpu":0.297,"A-Cpu":0.297,"Name":"Write insert_data_sink","SinkType":"s3"}],"Node Type":"Write insert_data_sink"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} |99.8%| [TM] {RESULT} ydb/core/kqp/ut/federated_query/s3/unittest >> test_log_scenario.py::TestLogScenario::test[180] [GOOD] >> test_log_scenario.py::TestLogScenario::test[1051200] >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/py3test >> 
test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.8%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test >> YdbWorkloadTransferTopicToTable::Default_Init_Clean [GOOD] >> YdbWorkloadTransferTopicToTable::Specific_Init_Clean >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] |99.8%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test >> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> YdbWorkloadTransferTopicToTable::Specific_Init_Clean [GOOD] >> YdbWorkloadTransferTopicToTable::Clean_Without_Init >> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[9] >> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] >> YdbWorkloadTransferTopicToTable::Clean_Without_Init [GOOD] >> YdbWorkloadTransferTopicToTable::Double_Init >> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] >> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD] >> test_workload.py::TestYdbWorkload::test [GOOD] >> YdbWorkloadTransferTopicToTable::Double_Init [GOOD] >> YdbWorkloadTransferTopicToTable::Statistics >> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[13] >> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[14] >> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] >> test_result_limits.py::TestResultLimits::test_large_row |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.8%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[16] >> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.8%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] >> YdbWorkloadTransferTopicToTable::Statistics [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] |99.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/apps/ydb/ut/unittest >> YdbWorkloadTransferTopicToTable::Statistics [GOOD] |99.9%| [TM] {RESULT} ydb/apps/ydb/ut/unittest >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/py3test 
>> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=4125462) is multi-threaded, use of fork() may lead to deadlocks in the child. contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback |99.9%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test >> test_log_scenario.py::TestLogScenario::test[1051200] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD] >> S3AwsCredentials::ExecuteScriptWithEqSymbol >> S3AwsCredentials::ExecuteScriptWithEqSymbol [GOOD] >> S3AwsCredentials::TestInsertEscaping >> zip_bomb.py::TestZipBomb::test >> S3AwsCredentials::TestInsertEscaping [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[21] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/external_sources/s3/ut/unittest >> S3AwsCredentials::TestInsertEscaping [GOOD] Test command err: Trying to start YDB, gRPC: 18835, MsgBus: 13052 2025-05-05T10:08:01.036788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7500901315563167939:2202];send_to=[0:7307199536658146131:7762515]; 2025-05-05T10:08:01.036849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/001030/r3tmp/tmpz7SkxT/pdisk_1.dat 2025-05-05T10:08:01.113965Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18835, node 1 2025-05-05T10:08:01.139700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T10:08:01.139721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T10:08:01.141300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T10:08:01.160068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T10:08:01.160083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T10:08:01.160662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T10:08:01.160713Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13052 TClient is connected to server localhost:13052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T10:08:01.252713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T10:08:01.258267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-05-05T10:08:01.759826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500901315563168442:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:01.759849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:02.041766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-05T10:08:02.111475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500901319858135871:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:02.111498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:02.111544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7500901319858135876:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:02.112428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-05-05T10:08:02.113318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7500901319858135878:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-05-05T10:08:02.193019Z node 1 :TX_PROXY ERROR: Actor# [1:7500901319858135918:2396] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-05T10:08:02.352565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.392891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-05-05T10:08:02.438080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.484908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.524877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.567216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-05T10:08:02.577622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.868232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.877654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.881743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.882009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-05-05T10:08:02.882159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-05-05T10:08:03.023377Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500901324153104670:2701], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
: Error: The specified bucket does not exist, error: code: NoSuchBucket, request id: [6f7b5910-8fdf2ce5-80af3a89-8728bc96] 2025-05-05T10:08:03.024815Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmYyNWE0MGEtYjIzYWVjYTMtMWI1NWViYjktNWEzZmI5OGI=, ActorId: [1:7500901324153104667:2700], ActorState: ExecuteState, TraceId: 01jtfynet0dngg5y3cxs0ah22r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-05T10:08:04.145026Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500901328448072480:2887], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
: Error: The specified bucket does not exist, error: code: NoSuchBucket, request id: [3503b540-e26bfe0e-c22b72e3-65be0ef0] 2025-05-05T10:08:04.145086Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmE3MTY4ZWItNDZjNWY2MzgtZjY2MzM1OWMtOGU2MGNlZGQ=, ActorId: [1:7500901328448072478:2886], ActorState: ExecuteState, TraceId: 01jtfyng198hbr7n7pd7yb0wk4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-05T10:08:05.400789Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746439685431, txId: 281474976715761] shutting down 2025-05-05T10:08:05.463080Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746439685508, txId: 281474976715773] shutting down 2025-05-05T10:08:05.480268Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7500901332743041002:3285], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:21: Error: Failed to load metadata for table: /Root/external_data_source.[/a/]
: Error: secret with name 'id' not found 2025-05-05T10:08:05.480317Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTgwOWZhODEtNzk3MzNjZjEtNmI2MThkYjYtN2M2NDdlNWY=, ActorId: [1:7500901332743041000:3284], ActorState: ExecuteState, TraceId: 01jtfynhar2ema5tm3ak56tzs2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-05-05T10:08:05.497192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715784:0, at schemeshard: 72057594046644480 2025-05-05T10:08:05.542240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715787:0, at schemeshard: 72057594046644480 2025-05-05T10:08:05.594606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715790:0, at schemeshard: 72057594046644480 2025-05-05T10:08:05.637765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715793:0, at schemeshard: 72057594046644480 2025-05-05T10:08:05.887096Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746439685928, txId: 281474976715822] shutting down 2025-05-05T10:08:05.941725Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746439685984, txId: 281474976715836] shutting down Trying to start YDB, gRPC: 7930, MsgBus: 25600 2025-05-05T10:08:06.164024Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7500901340056266717:2063];send_to=[0:7307199536658146131:7762515]; 2025-05-05T10:08:06.164041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/kpz1/001030/r3tmp/tmpkqTN5A/pdisk_1.dat 2025-05-05T10:08:06.171412Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7930, node 2 2025-05-05T10:08:06.181260Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-05-05T10:08:06.181271Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-05-05T10:08:06.181273Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-05-05T10:08:06.181294Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25600 TClient is connected to server localhost:25600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-05-05T10:08:06.266405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-05-05T10:08:06.266430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-05-05T10:08:06.267131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-05-05T10:08:06.267481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-05-05T10:08:06.870066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500901340056267351:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:06.870088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:07.165456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-05-05T10:08:07.172404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500901344351234776:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:07.172419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7500901344351234781:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:07.172427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-05-05T10:08:07.172846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-05-05T10:08:07.174093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7500901344351234783:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-05-05T10:08:07.263359Z node 2 :TX_PROXY ERROR: Actor# [2:7500901344351234824:2387] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-05-05T10:08:07.303246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.344076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2025-05-05T10:08:07.387934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.480395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.519364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.558512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-05-05T10:08:07.566964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.810710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.819979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.820153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-05-05T10:08:07.820285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-05-05T10:08:08.192341Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1746439688238, txId: 281474976715735] shutting down 2025-05-05T10:08:08.199963Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7500901348646204490:3005], status: INTERNAL_ERROR, issues:
: Fatal: Table metadata loading, code: 1050
:2:17: Fatal: Failed to load metadata for table: /Root/external_data_source.[exp_folder/some_ !"#$%&'()+,-./0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_abcdefghijklmnopqrstuvwxyz|~`/]
: Fatal: couldn't load table metadata: parameter is not supported with type inference: data.datetime.format, code: 1 2025-05-05T10:08:08.200030Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFjMzQ0MmItMzAyNzJmMC02YmU2YjA1NC01NDc3OTNlOQ==, ActorId: [2:7500901348646204488:3004], ActorState: ExecuteState, TraceId: 01jtfynm014x3th2thhv6f6z1d, ReplyQueryCompileError, status INTERNAL_ERROR remove tx with tx_id: |99.9%| [TM] {RESULT} ydb/core/external_sources/s3/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c46/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/kpz1/000c46/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 4120751 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback |99.9%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test >> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[22] >> zip_bomb.py::TestZipBomb::test [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] |99.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] |99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/py3test >> zip_bomb.py::TestZipBomb::test [GOOD] Test command err: Pid 40960 upsert #0 ok, result: [] upsert #1 ok, result: [] Rss after upsert 636744 [{'column0': b'xxx...x1', 'column1': b'xxx...x2', 'column2': b'xxx...x3', 'column3': b'xxx...x4', 'column4': b'xxx...x5'}] (long runs of repeated 'x' payload bytes elided) Max rss {} 3471264 |99.9%| [TM] {RESULT} ydb/tests/olap/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression [GOOD] >> alter_compression.py::TestAlterCompression::test_availability_data >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> 
test_simple.py::TestSimple::test[alter_tablestore] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |99.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] |99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] 2025-05-05 10:13:03,776 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 10:13:03,956 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 4157773 1.1G 1.1G 1.1G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/kpz1/000581/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 42507 1.8G 1.8G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins Test command err: test_suffix, num 0, table path read_update_write_load start_time 1746439389.6857557 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. 
only write Write: 10% 5073 30% 5073 50% 5073 90% 5073 99% 5073 ms Write: 10% 9755 30% 9755 50% 9755 90% 9755 99% 9755 ms Write: 10% 13288 30% 13288 50% 13288 90% 13288 99% 13288 ms Write: 10% 12059 30% 12059 50% 12059 90% 12059 99% 12059 ms Write: 10% 11380 30% 11380 50% 11380 90% 11380 99% 11380 ms Write: 10% 11391 30% 11391 50% 11391 90% 11391 99% 11391 ms Write: 10% 12697 30% 12697 50% 12697 90% 12697 99% 12697 ms Write: 10% 11993 30% 11993 50% 11993 90% 11993 99% 11993 ms Write: 10% 12473 30% 12473 50% 12473 90% 12473 99% 12473 ms Write: 10% 13853 30% 13853 50% 13853 90% 13853 99% 13853 ms Write: 10% 12297 30% 12297 50% 12297 90% 12297 99% 12297 ms Write: 10% 12088 30% 12088 50% 12088 90% 12088 99% 12088 ms Write: 10% 11535 30% 11535 50% 11535 90% 11535 99% 11535 ms Write: 10% 11071 30% 11071 50% 11071 90% 11071 99% 11071 ms Write: 10% 12040 30% 12040 50% 12040 90% 12040 99% 12040 ms Write: 10% 11037 30% 11037 50% 11037 90% 11037 99% 11037 ms Write: 10% 11213 30% 11213 50% 11213 90% 11213 99% 11213 ms Write: 10% 10620 30% 10620 50% 10620 90% 10620 99% 10620 ms Write: 10% 10775 30% 10775 50% 10775 90% 10775 99% 10775 ms Write: 10% 10709 30% 10709 50% 10709 90% 10709 99% 10709 ms Write: 10% 10651 30% 10651 50% 10651 90% 10651 99% 10651 ms Write: 10% 5394 30% 5394 50% 5394 90% 5394 99% 5394 ms Write: 10% 10548 30% 10548 50% 10548 90% 10548 99% 10548 ms Write: 10% 10400 30% 10400 50% 10400 90% 10400 99% 10400 ms Write: 10% 10056 30% 10056 50% 10056 90% 10056 99% 10056 ms Write: 10% 8899 30% 8899 50% 8899 90% 8899 99% 8899 ms Write: 10% 9523 30% 9523 50% 9523 90% 9523 99% 9523 ms Write: 10% 9155 30% 9155 50% 9155 90% 9155 99% 9155 ms Write: 10% 10269 30% 10269 50% 10269 90% 10269 99% 10269 ms Write: 10% 8877 30% 8877 50% 8877 90% 8877 99% 8877 ms Write: 10% 8839 30% 8839 50% 8839 90% 8839 99% 8839 ms Write: 10% 3576 30% 3576 50% 3576 90% 3576 99% 3576 ms Write: 10% 8756 30% 8756 50% 8756 90% 8756 99% 8756 ms Write: 10% 8636 30% 8636 50% 8636 90% 8636 99% 8636 ms Write: 10% 8471 30% 8471 50% 8471 90% 8471 99% 8471 ms Write: 10% 7062 30% 7062 50% 7062 90% 7062 99% 7062 ms Write: 10% 8215 30% 8215 50% 8215 90% 8215 99% 8215 ms Write: 10% 7683 30% 7683 50% 7683 90% 7683 99% 7683 ms Write: 10% 7812 30% 7812 50% 7812 90% 7812 99% 7812 ms Write: 10% 6303 30% 6303 50% 6303 90% 6303 99% 6303 ms Write: 10% 7113 30% 7113 50% 7113 90% 7113 99% 7113 ms Write: 10% 6247 30% 6247 50% 6247 90% 6247 99% 6247 ms Write: 10% 6583 30% 6583 50% 6583 90% 6583 99% 6583 ms Write: 10% 6137 30% 6137 50% 6137 90% 6137 99% 6137 ms Write: 10% 5764 30% 5764 50% 5764 90% 5764 99% 5764 ms Write: 10% 5219 30% 5219 50% 5219 90% 5219 99% 5219 ms Write: 10% 5371 30% 5371 50% 5371 90% 5371 99% 5371 ms Write: 10% 4535 30% 4535 50% 4535 90% 4535 99% 4535 ms Write: 10% 5011 30% 5011 50% 5011 90% 5011 99% 5011 ms Write: 10% 4328 30% 4328 50% 4328 90% 4328 99% 4328 ms Write: 10% 4408 30% 4408 50% 4408 90% 4408 99% 4408 ms Write: 10% 3953 30% 3953 50% 3953 90% 3953 99% 3953 ms Write: 10% 4158 30% 4158 50% 4158 90% 4158 99% 4158 ms Write: 10% 5211 30% 5211 50% 5211 90% 5211 99% 5211 ms Write: 10% 9129 30% 9129 50% 9129 90% 9129 99% 9129 ms Write: 10% 3909 30% 3909 50% 3909 90% 3909 99% 3909 ms Write: 10% 3902 30% 3902 50% 3902 90% 3902 99% 3902 ms Write: 10% 3880 30% 3880 50% 3880 90% 3880 99% 3880 ms Write: 10% 3841 30% 3841 50% 3841 90% 3841 99% 3841 ms Write: 10% 3940 30% 3940 50% 3940 90% 3940 99% 3940 ms Write: 10% 3641 30% 3641 50% 3641 90% 3641 99% 3641 ms Write: 10% 3778 30% 3778 50% 3778 90% 3778 99% 3778 ms Write: 10% 5969 
30% 5969 50% 5969 90% 5969 99% 5969 ms Write: 10% 5121 30% 5121 50% 5121 90% 5121 99% 5121 ms Step 2. read write Write: 10% 10427 30% 10427 50% 10427 90% 10427 99% 10427 ms Write: 10% 11474 30% 11474 50% 11474 90% 11474 99% 11474 ms Write: 10% 12104 30% 12104 50% 12104 90% 12104 99% 12104 ms Write: 10% 12859 30% 12859 50% 12859 90% 12859 99% 12859 ms Write: 10% 10872 30% 10872 50% 10872 90% 10872 99% 10872 ms Write: 10% 14230 30% 14230 50% 14230 90% 14230 99% 14230 ms Write: 10% 14784 30% 14784 50% 14784 90% 14784 99% 14784 ms Write: 10% 14078 30% 14078 50% 14078 90% 14078 99% 14078 ms Write: 10% 13006 30% 13006 50% 13006 90% 13006 99% 13006 ms Write: 10% 14335 30% 14335 50% 14335 90% 14335 99% 14335 ms Write: 10% 9578 30% 9578 50% 9578 90% 9578 99% 9578 ms Write: 10% 7329 30% 7329 50% 7329 90% 7329 99% 7329 ms Write: 10% 12962 30% 12962 50% 12962 90% 12962 99% 12962 ms Write: 10% 7380 30% 7380 50% 7380 90% 7380 99% 7380 ms Write: 10% 12539 30% 12539 50% 12539 90% 12539 99% 12539 ms Write: 10% 12644 30% 12644 50% 12644 90% 12644 99% 12644 ms Write: 10% 12767 30% 12767 50% 12767 90% 12767 99% 12767 ms Write: 10% 11991 30% 11991 50% 11991 90% 11991 99% 11991 ms Write: 10% 11702 30% 11702 50% 11702 90% 11702 99% 11702 ms Write: 10% 11534 30% 11534 50% 11534 90% 11534 99% 11534 ms Write: 10% 11024 30% 11024 50% 11024 90% 11024 99% 11024 ms Write: 10% 11147 30% 11147 50% 11147 90% 11147 99% 11147 ms Write: 10% 15239 30% 15239 50% 15239 90% 15239 99% 15239 ms Write: 10% 11008 30% 11008 50% 11008 90% 11008 99% 11008 ms Write: 10% 9837 30% 9837 50% 9837 90% 9837 99% 9837 ms Write: 10% 9729 30% 9729 50% 9729 90% 9729 99% 9729 ms Write: 10% 8962 30% 8962 50% 8962 90% 8962 99% 8962 ms Write: 10% 9012 30% 9012 50% 9012 90% 9012 99% 9012 ms Write: 10% 9850 30% 9850 50% 9850 90% 9850 99% 9850 ms Write: 10% 4570 30% 4570 50% 4570 90% 4570 99% 4570 ms Write: 10% 9091 30% 9091 50% 9091 90% 9091 99% 9091 ms Write: 10% 8750 30% 8750 50% 8750 90% 8750 99% 8750 ms Write: 10% 8890 30% 8890 50% 8890 90% 8890 99% 8890 ms Write: 10% 8166 30% 8166 50% 8166 90% 8166 99% 8166 ms Write: 10% 7707 30% 7707 50% 7707 90% 7707 99% 7707 ms Write: 10% 8329 30% 8329 50% 8329 90% 8329 99% 8329 ms Write: 10% 8222 30% 8222 50% 8222 90% 8222 99% 8222 ms Write: 10% 7700 30% 7700 50% 7700 90% 7700 99% 7700 ms Write: 10% 7311 30% 7311 50% 7311 90% 7311 99% 7311 ms Write: 10% 6969 30% 6969 50% 6969 90% 6969 99% 6969 ms Write: 10% 6795 30% 6795 50% 6795 90% 6795 99% 6795 ms Write: 10% 7157 30% 7157 50% 7157 90% 7157 99% 7157 ms Write: 10% 6591 30% 6591 50% 6591 90% 6591 99% 6591 ms Write: 10% 5957 30% 5957 50% 5957 90% 5957 99% 5957 ms Write: 10% 5998 30% 5998 50% 5998 90% 5998 99% 5998 ms Write: 10% 5781 30% 5781 50% 5781 90% 5781 99% 5781 ms Write: 10% 6018 30% 6018 50% 6018 90% 6018 99% 6018 ms Write: 10% 6129 30% 6129 50% 6129 90% 6129 99% 6129 ms Write: 10% 5503 30% 5503 50% 5503 90% 5503 99% 5503 ms Write: 10% 4899 30% 4899 50% 4899 90% 4899 99% 4899 ms Write: 10% 5448 30% 5448 50% 5448 90% 5448 99% 5448 ms Write: 10% 5221 30% 5221 50% 5221 90% 5221 99% 5221 ms Write: 10% 5491 30% 5491 50% 5491 90% 5491 99% 5491 ms Write: 10% 5215 30% 5215 50% 5215 90% 5215 99% 5215 ms Write: 10% 5075 30% 5075 50% 5075 90% 5075 99% 5075 ms Write: 10% 5426 30% 5426 50% 5426 90% 5426 99% 5426 ms Write: 10% 10101 30% 10101 50% 10101 90% 10101 99% 10101 ms Write: 10% 5042 30% 5042 50% 5042 90% 5042 99% 5042 ms Write: 10% 4850 30% 4850 50% 4850 90% 4850 99% 4850 ms Write: 10% 4875 30% 4875 50% 4875 90% 4875 99% 4875 ms Write: 10% 8989 30% 8989 50% 
8989 90% 8989 99% 8989 ms Write: 10% 8849 30% 8849 50% 8849 90% 8849 99% 8849 ms Write: 10% 7803 30% 7803 50% 7803 90% 7803 99% 7803 ms Write: 10% 6473 30% 6473 50% 6473 90% 6473 99% 6473 ms Read: 10% 5612 30% 9055 50% 12497 90% 19382 99% 20931 ms Step 3. write modify Write: 10% 12276 30% 12276 50% 12276 90% 12276 99% 12276 ms Write: 10% 13622 30% 13622 50% 13622 90% 13622 99% 13622 ms Write: 10% 14469 30% 14469 50% 14469 90% 14469 99% 14469 ms Write: 10% 13939 30% 13939 50% 13939 90% 13939 99% 13939 ms Write: 10% 14208 30% 14208 50% 14208 90% 14208 99% 14208 ms Write: 10% 14100 30% 14100 50% 14100 90% 14100 99% 14100 ms Write: 10% 13594 30% 13594 50% 13594 90% 13594 99% 13594 ms Write: 10% 14013 30% 14013 50% 14013 90% 14013 99% 14013 ms Write: 10% 13935 30% 13935 50% 13935 90% 13935 99% 13935 ms Write: 10% 14113 30% 14113 50% 14113 90% 14113 99% 14113 ms Write: 10% 14498 30% 14498 50% 14498 90% 14498 99% 14498 ms Write: 10% 14292 30% 14292 50% 14292 90% 14292 99% 14292 ms Write: 10% 14411 30% 14411 50% 14411 90% 14411 99% 14411 ms Write: 10% 14567 30% 14567 50% 14567 90% 14567 99% 14567 ms Write: 10% 12886 30% 12886 50% 12886 90% 12886 99% 12886 ms Write: 10% 13270 30% 13270 50% 13270 90% 13270 99% 13270 ms Write: 10% 12636 30% 12636 50% 12636 90% 12636 99% 12636 ms Write: 10% 13453 30% 13453 50% 13453 90% 13453 99% 13453 ms Write: 10% 12554 30% 12554 50% 12554 90% 12554 99% 12554 ms Write: 10% 11975 30% 11975 50% 11975 90% 11975 99% 11975 ms Write: 10% 12132 30% 12132 50% 12132 90% 12132 99% 12132 ms Write: 10% 12079 30% 12079 50% 12079 90% 12079 99% 12079 ms Write: 10% 12620 30% 12620 50% 12620 90% 12620 99% 12620 ms Write: 10% 11551 30% 11551 50% 11551 90% 11551 99% 11551 ms Write: 10% 11506 30% 11506 50% 11506 90% 11506 99% 11506 ms Write: 10% 11373 30% 11373 50% 11373 90% 11373 99% 11373 ms Write: 10% 11686 30% 11686 50% 11686 90% 11686 99% 11686 ms Write: 10% 11725 30% 11725 50% 11725 90% 11725 99% 11725 ms Write: 10% 11210 30% 11210 50% 11210 90% 11210 99% 11210 ms Write: 10% 10865 30% 10865 50% 10865 90% 10865 99% 10865 ms Write: 10% 11043 30% 11043 50% 11043 90% 11043 99% 11043 ms Write: 10% 11225 30% 11225 50% 11225 90% 11225 99% 11225 ms Write: 10% 10732 30% 10732 50% 10732 90% 10732 99% 10732 ms Write: 10% 11704 30% 11704 50% 11704 90% 11704 99% 11704 ms Write: 10% 8640 30% 8640 50% 8640 90% 8640 99% 8640 ms Write: 10% 10476 30% 10476 50% 10476 90% 10476 99% 10476 ms Write: 10% 10981 30% 10981 50% 10981 90% 10981 99% 10981 ms Write: 10% 9476 30% 9476 50% 9476 90% 9476 99% 9476 ms Write: 10% 11455 30% 11455 50% 11455 90% 11455 99% 11455 ms Write: 10% 7264 30% 7264 50% 7264 90% 7264 99% 7264 ms Write: 10% 9541 30% 9541 50% 9541 90% 9541 99% 9541 ms Write: 10% 8870 30% 8870 50% 8870 90% 8870 99% 8870 ms Write: 10% 8558 30% 8558 50% 8558 90% 8558 99% 8558 ms Write: 10% 8699 30% 8699 50% 8699 90% 8699 99% 8699 ms Write: 10% 8453 30% 8453 50% 8453 90% 8453 99% 8453 ms Write: 10% 7221 30% 7221 50% 7221 90% 7221 99% 7221 ms Write: 10% 7905 30% 7905 50% 7905 90% 7905 99% 7905 ms Write: 10% 7543 30% 7543 50% 7543 90% 7543 99% 7543 ms Write: 10% 6953 30% 6953 50% 6953 90% 6953 99% 6953 ms Write: 10% 7111 30% 7111 50% 7111 90% 7111 99% 7111 ms Write: 10% 6806 30% 6806 50% 6806 90% 6806 99% 6806 ms Write: 10% 6483 30% 6483 50% 6483 90% 6483 99% 6483 ms Write: 10% 6244 30% 6244 50% 6244 90% 6244 99% 6244 ms Write: 10% 5776 30% 5776 50% 5776 90% 5776 99% 5776 ms Write: 10% 5764 30% 5764 50% 5764 90% 5764 99% 5764 ms Write: 10% 5333 30% 5333 50% 5333 90% 5333 99% 5333 ms Write: 10% 4767 30% 
4767 50% 4767 90% 4767 99% 4767 ms Write: 10% 5476 30% 5476 50% 5476 90% 5476 99% 5476 ms Write: 10% 5068 30% 5068 50% 5068 90% 5068 99% 5068 ms Write: 10% 4932 30% 4932 50% 4932 90% 4932 99% 4932 ms Write: 10% 5174 30% 5174 50% 5174 90% 5174 99% 5174 ms Write: 10% 4864 30% 4864 50% 4864 90% 4864 99% 4864 ms Write: 10% 4850 30% 4850 50% 4850 90% 4850 99% 4850 ms Write: 10% 4790 30% 4790 50% 4790 90% 4790 99% 4790 ms Was written: 18.75 MiB, Speed: 0.3125 MiB/s Update: 10% 1705 30% 1705 50% 1705 90% 1705 99% 1705 ms Step 4. read modify write Write: 10% 2309 30% 2309 50% 2309 90% 2309 99% 2309 ms Write: 10% 6551 30% 6551 50% 6551 90% 6551 99% 6551 ms Write: 10% 11258 30% 11258 50% 11258 90% 11258 99% 11258 ms Write: 10% 11140 30% 11140 50% 11140 90% 11140 99% 11140 ms Write: 10% 12367 30% 12367 50% 12367 90% 12367 99% 12367 ms Write: 10% 12631 30% 12631 50% 12631 90% 12631 99% 12631 ms Write: 10% 13192 30% 13192 50% 13192 90% 13192 99% 13192 ms Write: 10% 12867 30% 12867 50% 12867 90% 12867 99% 12867 ms Write: 10% 12977 30% 12977 50% 12977 90% 12977 99% 12977 ms Write: 10% 13449 30% 13449 50% 13449 90% 13449 99% 13449 ms Write: 10% 13358 30% 13358 50% 13358 90 ... py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc6904f7640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc68d4f1640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc68dcf2640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc68e4f3640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc6914f9640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc6924fb640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File 
"contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc693cfe640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/olap/common/thread_helper.py", line 16 in join File "ydb/tests/olap/common/thread_helper.py", line 45 in join_all File "ydb/tests/olap/scenario/test_insert.py", line 103 in scenario_read_data_during_bulk_upsert File "/home/runner/.ya/build/build_root/kpz1/000581/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 110 in _test_suffix File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc694efd640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc6956fe640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc6a15fd640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc69a3ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc8fd5ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File 
"contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc8bcbfb640 (most recent call first): File "contrib/tools/python3/Lib/selectors.py", line 415 in select File "contrib/tools/python3/Lib/socketserver.py", line 235 in serve_forever File "contrib/python/Werkzeug/py3/werkzeug/serving.py", line 766 in serve_forever File "contrib/python/moto/py3/moto/moto_server/threaded_moto_server.py", line 24 in _server_entry File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc915fff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007fc917bff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007fc932c07440 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1624 in _shutdown Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/kpz1/000581/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/kpz1/000581', '--source-root', '/home/runner/.ya/build/build_root/kpz1/000581/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer 
-Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/kpz1/000581/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/kpz1/000581', '--source-root', '/home/runner/.ya/build/build_root/kpz1/000581/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/kpz1/000581/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) 2025-05-05 10:13:34,492 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-05-05 10:13:34,492 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 18 ydb/library/yaml_config/ut_transform [size:medium] ------ sole chunk ran 6 tests (total:7.58s - test:6.93s canon:0.42s) [fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-relwithdebinfo] (0.85s) Test results differ from canonical: test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff ------ FAIL: 5 - GOOD, 1 - FAIL ydb/library/yaml_config/ut_transform ydb/tests/functional/hive [size:medium] nchunks:80 ------ 
[test_drain.py 0/20] chunk ran 1 test (total:19.13s - setup:0.01s test:19.00s) [fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-relwithdebinfo] (16.46s) ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E ############################## E 0 seconds passed, 2 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224038328: 6) (72075186224038400: 6). Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 4/10] chunk ran 2 tests (total:203.98s - test:202.70s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (48.94s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. 
Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (149.98s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 20 - GOOD, 2 - FAIL ydb/tests/functional/serverless ydb/tests/functional/tenants [size:medium] nchunks:20 ------ [10/20] chunk ran 5 tests (total:76.91s - test:76.78s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (21.26s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [11/20] chunk ran 5 tests (total:69.17s - setup:0.01s test:69.03s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (7.55s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [12/20] chunk ran 5 tests (total:286.49s - setup:0.02s test:286.28s) [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (77.64s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff [fail] 
test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (68.48s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [2/20] chunk ran 6 tests (total:112.12s - setup:0.07s test:111.82s) [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (18.49s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } 
,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (12.48s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ FAIL: 101 - GOOD, 6 - FAIL, 2 - XFAIL ydb/tests/functional/tenants ydb/tests/olap/data_quotas [size:medium] nchunks:10 ------ [0/10] chunk ran 1 test (total:631.70s - setup:0.05s test:600.13s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_quota_exhaustion.py::TestYdbWorkload::test (timeout) 
duration: 628.50s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/stderr [timeout] test_quota_exhaustion.py::TestYdbWorkload::test [default-linux-x86_64-relwithdebinfo] (628.50s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff ------ [2/10] chunk ran 1 test (total:228.92s - setup:0.10s test:228.75s) [fail] test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [default-linux-x86_64-relwithdebinfo] (225.57s) ydb/tests/olap/data_quotas/test_quota_exhaustion.py:236: in test_duplicates self.upsert_until_overload(lambda i: self.upsert_test_chunk(session, table_path, 0, retries=0), timeout_seconds=200) ydb/tests/olap/data_quotas/test_quota_exhaustion.py:83: in upsert_until_overload assert time.time() <= deadline, "deadline exceeded" E AssertionError: deadline exceeded E assert 1746438405.0266728 <= 1746438403.5557435 E + where 1746438405.0266728 = () E + where = time.time Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff/test_quota_exhaustion.py.TestYdbWorkload.test_duplicates.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/data_quotas/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 1 - FAIL, 1 - TIMEOUT ydb/tests/olap/data_quotas ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 18 tests (total:632.49s - setup:0.04s test:600.05s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 181.16s test_alter_tiering.py::TestAlterTiering::test[many_tables] (good) duration: 149.08s test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (timeout) duration: 139.23s test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 115.91s test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 16.46s test_simple.py::TestSimple::test[tablestores] (good) duration: 12.50s test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] (good) duration: 2.78s test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] (good) duration: 2.51s test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] (good) duration: 2.10s test_simple.py::TestSimple::test_multi[alter_table] (good) duration: 2.02s 7 more tests with 3.75s total duration are not listed. test_insert.py::TestInsert::test[read_data_during_bulk_upsert] test was not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [default-linux-x86_64-relwithdebinfo] (139.23s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test_multi.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 16 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:10 ------ [1/10] chunk ran 1 test (total:323.70s - test:323.60s) [fail] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (319.43s) ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py:203: in test raise Exception("Bucket1 is not empty") E Exception: Bucket1 is not empty Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [4/10] chunk ran 1 test (total:361.01s - setup:0.01s test:360.83s) [fail] ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [default-linux-x86_64-relwithdebinfo] (357.45s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:386: in test_ttl_delete raise Exception(".sys reports incorrect data portions") E Exception: .sys reports incorrect data portions Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteTtl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ FAIL: 6 - GOOD, 2 - FAIL ydb/tests/olap/ttl_tiering ------ sole chunk ran 1 test (total:226.26s - setup:0.03s test:225.31s) Info: Test run has exceeded 32.0G (33554432K) memory limit with 62.8G (65836248K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 4190396 44.8M 44.0M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 4190617 32.7M 20.7M 8.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 4190669 241M 241M 192M └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:f 4193361 7.7G 7.4G 7.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4193419 7.0G 6.9G 6.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4193446 7.0G 6.9G 6.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4193502 7.3G 7.2G 7.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4193600 7.0G 6.9G 6.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4193614 7.0G 6.9G 6.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4193651 7.0G 6.9G 6.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4194048 6.9G 6.7G 6.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing 4194116 7.0G 6.8G 6.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr ------ sole chunk ran 2 tests (total:304.44s - setup:0.05s test:302.17s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 9.0G (9405380K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 4115737 44.8M 44.4M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 4116277 32.6M 20.6M 8.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 4116289 945M 923M 892M └─ ydb-tests-stress-simple_queue-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fa 4118752 1022M 966M 769M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118755 914M 882M 691M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118757 997M 982M 763M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118760 948M 929M 692M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118762 866M 832M 641M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118765 864M 849M 648M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118768 962M 943M 712M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118774 912M 901M 695M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ 4118782 909M 909M 698M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff/stderr ydb/core/client/ut [size:medium] nchunks:60 ------ [59/60] chunk ran 2 tests (total:6.67s - recipes:0.33s test:5.95s recipes:0.33s) [fail] TObjectStorageListingTest::TestSkipShards [default-linux-x86_64-relwithdebinfo] (0.53s) equal assertion failed at ydb/core/client/object_storage_listing_ut.cpp:1075, virtual void NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TTestCaseTestSkipShards::Execute_(NUnitTest::TTestContext &): 2 == count TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:14 Get at /-S/util/generic/ptr.h:595:16 UnRef at /-S/util/generic/ptr.h:640:13 NKikimr::NFlatTests::NTestSuiteTObjectStorageListingTest::TCurrentTest::Execute()::'lambda'()::operator()() const at /-S/ydb/core/client/object_storage_listing_ut.cpp:0:1 ~__value_func at 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16
UnRef at /-S/util/generic/ptr.h:640:13
NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/client/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/client/ut/test-results/unittest/testing_out_stuff/TObjectStorageListingTest.TestSkipShards.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/client/ut/test-results/unittest/testing_out_stuff/TObjectStorageListingTest.TestSkipShards.out
------ FAIL: 122 - GOOD, 1 - FAIL ydb/core/client/ut
Total 448 suites:
  440 - GOOD
  6 - FAIL
  2 - TIMEOUT
Total 13741 tests:
  11093 - GOOD
  14 - FAIL
  1 - NOT_LAUNCHED
  2 - XFAIL
  2 - TIMEOUT
  2629 - SKIPPED
Cache efficiency ratio is 95.38% (44973 of 47149). Local: 0 (0.00%), dist: 2576 (5.46%), by dynamic uids: 0 (0.00%), avoided: 42397 (89.92%)
Dist cache download: count=2548, size=22.95 GiB, speed=179.31 MiB/s
Disk usage for tools/sdk at least 778.61 MiB
Additional disk space consumed for build cache 58.66 GiB
Critical path:
[ 834 ms] [CF] [20f9C3Iev1TOpW73h2_hFg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/sandbox.cpp [started: 0 (1746437908873), finished: 834 (1746437909707)]
[ 1894 ms] [CC] [GqENgENKCpXvzmkkepKrNw default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/sandbox.cpp [started: 52641 (1746437961514), finished: 54535 (1746437963408)]
[ 167 ms] [AR] [I_WBwUTtgQjTzWqSlSKJPg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/library/cpp/build_info/liblibrary-cpp-build_info.a [started: 108720 (1746438017593), finished: 108887 (1746438017760)]
[ 14904 ms] [LD] [3Kdk5uWKUsmXRjPuddvr_w default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 117363 (1746438026236), finished: 132267 (1746438041140)]
[632403 ms] [TM] [rnd-7000888348418243822 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 287059 (1746438195932), finished: 919462 (1746438828335)]
[ 1071 ms] [TA] [rnd-0yohdvirevgffmwv]: $(BUILD_ROOT)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} [started: 919505 (1746438828378), finished: 920576 (1746438829449)]
Time from start: 2130079.294921875 ms, time elapsed by graph 651273 ms, time diff 1478806.294921875 ms.
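The suite, test, and cache figures above are internally consistent; a minimal standalone Python check (variable names are illustrative and every number is copied from the summary above, so this is a sketch rather than part of the toolchain) reproduces the reported percentages:

# Consistency check for the run summary (figures taken verbatim from the log above).
suites = {"GOOD": 440, "FAIL": 6, "TIMEOUT": 2}
tests = {"GOOD": 11093, "FAIL": 14, "NOT_LAUNCHED": 1, "XFAIL": 2, "TIMEOUT": 2, "SKIPPED": 2629}
assert sum(suites.values()) == 448
assert sum(tests.values()) == 13741
# Cache efficiency = (local + dist + dynamic-uid + avoided) hits out of all 47149 results.
local, dist, dynamic, avoided, total = 0, 2576, 0, 42397, 47149
hits = local + dist + dynamic + avoided               # 44973
print(f"cache efficiency: {hits} of {total} -> {hits / total:.2%}")  # 95.38%
print(f"dist: {dist / total:.2%}, avoided: {avoided / total:.2%}")   # 5.46%, 89.92%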
The longest 10 tasks:
[632988 ms] [TM] [rnd-qb81onik78jtr45j default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746439383182, finished: 1746440016170]
[632403 ms] [TM] [rnd-7000888348418243822 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746438195932, finished: 1746438828335]
[505085 ms] [TM] [rnd-j771w73wydzjy4uz default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/column_family/compression/py3test [started: 1746439351515, finished: 1746439856600]
[483199 ms] [TM] [rnd-9361312001421167847 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746438550390, finished: 1746439033589]
[480925 ms] [TM] [rnd-13034406957713372748 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/ydb_cli/py3test [started: 1746438982852, finished: 1746439463777]
[456642 ms] [TM] [rnd-16416619077273522800 default-linux-x86_64 relwithdebinfo]: ydb/tests/fq/s3/py3test [started: 1746438805335, finished: 1746439261977]
[443536 ms] [TM] [rnd-7397350139480433133 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/serverless/py3test [started: 1746438374041, finished: 1746438817577]
[368698 ms] [TM] [rnd-z20wn4any5bycub6 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/py3test [started: 1746439385568, finished: 1746439754266]
[367151 ms] [TM] [rnd-12565668042407836422 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/tenants/py3test [started: 1746438232400, finished: 1746438599551]
[361529 ms] [TM] [rnd-6365164175483890871 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746438560004, finished: 1746438921533]
Total time by type:
[77194774 ms] [TM] [count: 1741, ave time 44339.33 msec]
[13258364 ms] [prepare:get from dist cache] [count: 2576, ave time 5146.88 msec]
[ 1661060 ms] [TS] [count: 305, ave time 5446.10 msec]
[ 787862 ms] [TA] [count: 82, ave time 9608.07 msec]
[ 495856 ms] [prepare:tools] [count: 20, ave time 24792.80 msec]
[ 281075 ms] [LD] [count: 23, ave time 12220.65 msec]
[ 235917 ms] [prepare:bazel-store] [count: 2, ave time 117958.50 msec]
[ 62756 ms] [prepare:AC] [count: 4, ave time 15689.00 msec]
[ 51942 ms] [prepare:put into local cache, clean build dir] [count: 2595, ave time 20.02 msec]
[ 23733 ms] [PY] [count: 1, ave time 23733.00 msec]
[ 9190 ms] [prepare:resources] [count: 2, ave time 4595.00 msec]
[ 6093 ms] [CC] [count: 5, ave time 1218.60 msec]
[ 3468 ms] [AR] [count: 5, ave time 693.60 msec]
[ 2965 ms] [UN] [count: 3, ave time 988.33 msec]
[ 2235 ms] [SB] [count: 2, ave time 1117.50 msec]
[ 1671 ms] [CF] [count: 2, ave time 835.50 msec]
[ 1428 ms] [ld] [count: 2, ave time 714.00 msec]
[ 1233 ms] [PK] [count: 2, ave time 616.50 msec]
[ 883 ms] [BI] [count: 1, ave time 883.00 msec]
[ 359 ms] [CP] [count: 1, ave time 359.00 msec]
[ 212 ms] [prepare:put to dist cache] [count: 24, ave time 8.83 msec]
[ 130 ms] [BN] [count: 1, ave time 130.00 msec]
[ 45 ms] [prepare:clean] [count: 3, ave time 15.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 79643696 ms (99.59%)
Total run tasks time - 79968969 ms
Configure time - 29.5 s
Statistics overhead 1234 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
+ ./ya make . 
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.RATOsPId8A --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-relwithdebinfo Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [7694/7695 modules configured] [0 ymakes processing] [7700/7700 modules configured] [0 ymakes processing] [7700/7700 modules configured] [4128/4128 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution | 7.7%| CLEANING SYMRES | 1.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut | 2.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut | 4.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a | 5.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a | 6.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut | 7.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load | 8.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut | 9.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool | 9.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |15.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |15.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |17.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |17.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |18.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |18.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |18.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |19.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |19.7%| PREPARE $(CLANG-1922233694) |20.1%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |20.5%| PREPARE $(CLANG-874354456) |20.8%| PREPARE $(CLANG18-1866954364) |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |21.6%| PREPARE $(GDB) |22.0%| PREPARE $(CLANG16-1380963495) |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |22.8%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |23.2%| PREPARE $(YMAKE_PYTHON3-4256832079) |23.6%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |23.9%| PREPARE $(LLD_ROOT-3808007503) |24.3%| PREPARE $(CLANG_FORMAT-1286082657) |24.7%| PREPARE $(PYTHON) |25.1%| PREPARE $(FLAKE8_PY3-715603131) |25.5%| PREPARE $(TEST_TOOL_HOST-sbr:8580453620) |25.9%| PREPARE $(WITH_JDK-sbr:7832760150) |26.3%| PREPARE $(WITH_JDK17-sbr:7832760150) |26.6%| PREPARE $(JDK17-472926544) |27.0%| PREPARE $(JDK_DEFAULT-472926544) |27.4%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |28.2%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |28.6%| COMPACTING CACHE 58.7GiB |29.0%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |29.3%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |29.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |30.1%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |30.5%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |30.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |31.3%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |31.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |32.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |32.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |32.8%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |33.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |33.6%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |34.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |34.4%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |34.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |35.1%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |35.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |35.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |36.3%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |36.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> TObjectStorageListingTest::TestSkipShards |42.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |42.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |43.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |43.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |44.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |44.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |44.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |45.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/client/ut/unittest |45.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |45.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |46.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |46.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |47.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |47.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |47.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |48.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |48.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |49.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |49.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |49.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |50.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] |50.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |51.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |51.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |51.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |52.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |53.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |54.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/s8wm/000092/r3tmp/tmpQv1A2y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5608, node 1 TClient is connected to server localhost:17097 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> test_drain.py::TestHive::test_drain_on_stop >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] |65.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |69.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |70.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/hive/py3test |70.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |71.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |71.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |71.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |72.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |72.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |73.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |73.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] >> test_workload.py::TestYdbWorkload::test[row] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/tenants/py3test >> test_workload.py::TestYdbWorkload::test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |85.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |85.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] |86.9%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] 2025-05-05 10:56:20,554 ERROR devtools.ya.test.canon.compare: Cannot calculate diff: Traceback (most recent call last): File "devtools/ya/test/canon/compare.py", line 402, in _get_file_diff_via_diff raise Exception( Exception: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: |88.4%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |88.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [FAIL] >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] |89.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |89.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [FAIL] |90.0%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |90.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [FAIL] |91.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] |91.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [FAIL] |91.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/py3test >> test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [FAIL] |92.3%| [TA] $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/tests/functional/tenants/test-results/py3test/{meta.json ... results_accumulator.log} >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> test_workload.py::TestYdbWorkload::test[column] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] Test command err: contrib/python/tornado/tornado-4/tornado/gen.py:1064: DeprecationWarning: the (type, exc, tb) signature of throw() is deprecated, use the single-arg signature instead. |93.8%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete [GOOD] Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/s8wm/00004c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/s8wm/00004c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk1/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/subprocess.py:1129: ResourceWarning: subprocess 51692 is still running ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_delete [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert: got overload issue delete #0 ok delete #1 ok delete #2 ok delete #3 ok delete #4 ok delete #5 ok delete #6 ok delete #7 ok delete #8 ok delete #9 ok delete #10 ok delete #11 ok delete #12 ok delete #13 ok delete #14 ok delete #15 ok delete #16 ok delete #17 ok delete #18 ok delete #19 ok delete #20 ok delete #21 ok delete #22 ok delete #23 ok delete #24 ok delete #25 ok delete #26 ok delete #27 ok delete #28 ok delete #29 ok delete #30 ok delete #31 ok delete #32 ok delete #33 ok delete #34 ok delete #35 ok delete #36 ok delete #37 ok delete #38 ok delete #39 ok delete #40 ok delete #41 ok delete #42 ok delete #43 ok delete #44 ok delete #45 ok delete #46 ok delete #47 ok delete #48 ok delete #49 ok delete #50 ok delete #51 ok delete #52 ok delete #53 ok delete #54 ok delete #55 ok delete #56 ok delete #57 ok delete #58 ok delete #59 ok delete #60 ok delete #61 ok delete #62 ok delete #63 ok delete #64 ok delete #65 ok delete #66 ok delete #67 ok delete #68 ok delete #69 ok delete #70 ok delete #71 ok delete #72 ok delete #73 ok delete #74 ok delete #75 ok delete #76 ok 
delete #77 ok delete #78 ok delete #79 ok delete #80 ok delete #81 ok delete #82 ok delete #83 ok delete #84 ok delete #85 ok delete #86 ok delete #87 ok delete #88 ok delete #89 ok delete #90 ok delete #91 ok delete #92 ok delete #93 ok delete #94 ok delete #95 ok delete #96 ok delete #97 ok delete #98 ok delete #99 ok delete #100 ok delete #101 ok delete #102 ok delete #103 ok delete #104 ok delete #105 ok delete #106 ok delete #107 ok delete #108 ok delete #109 ok delete #110 ok delete #111 ok delete #112 ok delete #113 ok delete #114 ok delete #115 ok delete #116 ok delete #117 ok delete #118 ok delete #119 ok delete #120 ok delete #121 ok delete #122 ok delete #123 ok delete #124 ok delete #125 ok delete #126 ok delete #127 ok delete #128 ok delete #129 ok delete #130 ok delete #131 ok delete #132 ok delete #133 ok delete #134 ok delete #135 ok delete #136 ok delete #137 ok delete #138 ok delete #139 ok delete #140 ok delete #141 ok delete #142 ok delete #143 ok delete #144 ok delete #145 ok delete #146 ok delete #147 ok delete #148 ok delete #149 ok delete #150 ok delete #151 ok delete #152 ok delete #153 ok delete #154 ok delete #155 ok delete #156 ok delete #157 ok delete #158 ok delete #159 ok delete #160 ok delete #161 ok delete #162 ok delete #163 ok delete #164 ok delete #165 ok delete #166 ok delete #167 ok delete #168 ok delete #169 ok delete #170 ok delete #171 ok delete #172 ok delete #173 ok delete #174 ok delete #175 ok delete #176 ok delete #177 ok delete #178 ok delete #179 ok delete #180 ok delete #181 ok delete #182 ok delete #183 ok delete #184 ok delete #185 ok delete #186 ok delete #187 ok delete #188 ok delete #189 ok delete #190 ok delete #191 ok delete #192 ok delete #193 ok delete #194 ok delete #195 ok delete #196 ok delete #197 ok delete #198 ok delete #199 ok delete #200 ok delete #201 ok delete #202 ok delete #203 ok delete #204 ok delete #205 ok delete #206 ok delete #207 ok delete #208 ok delete #209 ok delete #210 ok delete #211 ok delete #212 ok delete #213 ok delete #214 ok delete #215 ok delete #216 ok delete #217 ok delete #218 ok delete #219 ok delete #220 ok delete #221 ok delete #222 ok delete #223 ok delete #224 ok delete #225 ok delete #226 ok delete #227 ok delete #228 ok delete #229 ok delete #230 ok delete #231 ok delete #232 ok delete #233 ok delete #234 ok delete #235 ok delete #236 ok delete #237 ok delete #238 ok delete #239 ok delete #240 ok delete #241 ok delete #242 ok delete #243 ok delete #244 ok delete #245 ok delete #246 ok delete #247 ok delete #248 ok delete #249 ok delete #250 ok delete #251 ok delete #252 ok delete #253 ok delete #254 ok delete #255 ok delete #256 ok delete #257 ok delete #258 ok delete #259 ok delete #260 ok delete #261 ok delete #262 ok delete #263 ok delete #264 ok delete #265 ok delete #266 ok delete #267 ok delete #268 ok delete #269 ok delete #270 ok delete #271 ok delete #272 ok delete #273 ok delete #274 ok delete #275 ok delete #276 ok delete #277 ok delete #278 ok delete #279 ok delete #280 ok delete #281 ok delete #282 ok delete #283 ok delete #284 ok delete #285 ok delete #286 ok delete #287 ok delete #288 ok delete #289 ok delete #290 ok delete #291 ok delete #292 ok delete #293 ok delete #294 ok delete #295 ok delete #296 ok delete #297 ok delete #298 ok delete #299 ok delete #300 ok delete #301 ok delete #302 ok delete #303 ok delete #304 ok delete #305 ok delete #306 ok delete #307 ok delete #308 ok delete #309 ok delete #310 ok delete #311 ok delete #312 ok delete #313 ok delete #314 ok delete 
#315 ok delete #316 ok delete #317 ok delete #318 ok delete #319 ok delete #320 ok delete #321 ok delete #322 ok delete #323 ok delete #324 ok delete #325 ok delete #326 ok delete #327 ok delete #328 ok delete #329 ok delete #330 ok delete #331 ok delete #332 ok delete #333 ok delete #334 ok delete #335 ok delete #336 ok delete #337 ok delete #338 ok delete #339 ok delete #340 ok delete #341 ok delete #342 ok delete #343 ok delete #344 ok delete #345 ok delete #346 ok delete #347 ok delete #348 ok delete #349 ok delete #350 ok delete #351 ok delete #352 ok delete #353 ok delete #354 ok delete #355 ok delete #356 ok delete #357 ok delete #358 ok delete #359 ok delete #360 ok delete #361 ok delete #362 ok delete #363 ok delete #364 ok delete #365 ok delete #366 ok delete #367 ok delete #368 ok delete #369 ok delete #370 ok delete #371 ok delete #372 ok delete #373 ok delete #374 ok delete #375 ok delete #376 ok delete #377 ok delete #378 ok delete #379 ok delete #380 ok delete #381 ok delete #382 ok delete #383 ok delete #384 ok delete #385 ok delete #386 ok delete #387 ok delete #388 ok delete #389 ok delete #390 ok delete #391 ok delete #392 ok delete #393 ok delete #394 ok delete #395 ok delete #396 ok delete #397 ok delete #398 ok delete #399 ok delete #400 ok delete #401 ok delete #402 ok delete #403 ok delete #404 ok delete #405 ok delete #406 ok delete #407 ok delete #408 ok delete #409 ok delete #410 ok delete #411 ok delete #412 ok delete #413 ok delete #414 ok delete #415 ok delete #416 ok delete #417 ok delete #418 ok delete #419 ok delete #420 ok delete #421 ok delete #422 ok delete #423 ok delete #424 ok delete #425 ok delete #426 ok delete #427 ok delete #428 ok delete #429 ok delete #430 ok delete #431 ok delete #432 ok delete #433 ok delete #434 ok delete #435 ok delete #436 ok delete #437 ok delete #438 ok delete #439 ok delete #440 ok delete #441 ok delete #442 ok delete #443 ok delete #444 ok delete #445 ok delete #446 ok delete #447 ok delete #448 ok delete #449 ok delete #450 ok delete #451 ok delete #452 ok delete #453 ok delete #454 ok delete #455 ok delete #456 ok delete #457 ok delete #458 ok delete #459 ok delete #460 ok delete #461 ok delete #462 ok delete #463 ok delete #464 ok delete #465 ok delete #466 ok delete #467 ok delete #468 ok delete #469 ok delete #470 ok delete #471 ok delete #472 ok delete #473 ok delete #474 ok delete #475 ok delete #476 ok delete #477 ok delete #478 ok delete #479 ok delete #480 ok delete #481 ok delete #482 ok delete #483 ok delete #484 ok delete #485 ok delete #486 ok delete #487 ok delete #488 ok delete #489 ok delete #490 ok delete #491 ok delete #492 ok delete #493 ok delete #494 ok delete #495 ok delete #496 ok delete #497 ok delete #498 ok delete #499 ok >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |95.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |95.8%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test |96.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |96.5%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [GOOD] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test_duplicates [GOOD] Test command err: Database name /Root/test upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded 
False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert: got overload issue >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test[table] >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> test_simple.py::TestSimple::test[tablestores] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_quota_exhaustion.py::TestYdbWorkload::test [GOOD] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/py3test >> test_quota_exhaustion.py::TestYdbWorkload::test [GOOD] Test command err: upsert #0 ok, result: [] Quota exceeded False upsert #1 ok, result: [] Quota exceeded False upsert #2 ok, result: [] Quota exceeded False upsert #3 ok, result: [] Quota exceeded False upsert #4 ok, result: [] Quota exceeded False upsert #5 ok, result: [] Quota exceeded False upsert #6 ok, result: [] Quota exceeded False upsert #7 ok, result: [] Quota exceeded False upsert #8 ok, result: [] Quota exceeded False upsert #9 ok, result: [] Quota exceeded False upsert #10 ok, result: [] Quota exceeded False upsert #11 ok, result: [] Quota exceeded False upsert #12 ok, result: [] Quota exceeded False upsert #13 ok, result: [] Quota exceeded False upsert #14 ok, result: [] Quota exceeded False upsert #15 ok, result: [] Quota exceeded False upsert #16 ok, result: [] Quota exceeded False upsert #17 ok, result: [] Quota exceeded False upsert #18 ok, result: [] Quota exceeded False upsert #19 ok, result: [] Quota exceeded False upsert #20 ok, result: [] Quota exceeded False upsert #21 ok, result: [] Quota exceeded False upsert #22 ok, result: [] Quota exceeded False upsert #23 ok, result: [] Quota exceeded False upsert #24 ok, result: [] Quota exceeded False upsert #25 ok, result: [] Quota exceeded False upsert #26 ok, result: [] Quota exceeded False upsert #27 ok, result: [] Quota exceeded False upsert #28 ok, result: [] Quota exceeded False upsert #29 ok, result: [] Quota exceeded False upsert #30 ok, result: [] Quota exceeded False 
upsert #31 ok, result: [] Quota exceeded False upsert #32 ok, result: [] Quota exceeded False upsert #33 ok, result: [] Quota exceeded False upsert #34 ok, result: [] Quota exceeded False upsert #35 ok, result: [] Quota exceeded False upsert #36 ok, result: [] Quota exceeded False upsert #37 ok, result: [] Quota exceeded False upsert #38 ok, result: [] Quota exceeded False upsert #39 ok, result: [] Quota exceeded False upsert #40 ok, result: [] Quota exceeded False upsert #41 ok, result: [] Quota exceeded False upsert #42 ok, result: [] Quota exceeded False upsert #43 ok, result: [] Quota exceeded False upsert #44 ok, result: [] Quota exceeded False upsert #45 ok, result: [] Quota exceeded False upsert #46 ok, result: [] Quota exceeded False upsert #47 ok, result: [] Quota exceeded False upsert #48 ok, result: [] Quota exceeded False upsert #49 ok, result: [] Quota exceeded False upsert #50 ok, result: [] Quota exceeded False upsert #51 ok, result: [] Quota exceeded False upsert #52 ok, result: [] Quota exceeded False upsert #53 ok, result: [] Quota exceeded False upsert #54 ok, result: [] Quota exceeded False upsert #55 ok, result: [] Quota exceeded False upsert #56 ok, result: [] Quota exceeded False upsert #57 ok, result: [] Quota exceeded False upsert #58 ok, result: [] Quota exceeded False upsert #59 ok, result: [] Quota exceeded False upsert #60 ok, result: [] Quota exceeded False upsert #61 ok, result: [] Quota exceeded False upsert #62 ok, result: [] Quota exceeded False upsert #63 ok, result: [] Quota exceeded False upsert #64 ok, result: [] Quota exceeded False upsert #65 ok, result: [] Quota exceeded False upsert #66 ok, result: [] Quota exceeded False upsert #67 ok, result: [] Quota exceeded False upsert #68 ok, result: [] Quota exceeded False upsert #69 ok, result: [] Quota exceeded False upsert #70 ok, result: [] Quota exceeded False upsert #71 ok, result: [] Quota exceeded False upsert #72 ok, result: [] Quota exceeded False upsert #73 ok, result: [] Quota exceeded False upsert #74 ok, result: [] Quota exceeded False upsert #75 ok, result: [] Quota exceeded False upsert #76 ok, result: [] Quota exceeded False upsert #77 ok, result: [] Quota exceeded False upsert #78 ok, result: [] Quota exceeded False upsert #79 ok, result: [] Quota exceeded False upsert #80 ok, result: [] Quota exceeded False upsert #81 ok, result: [] Quota exceeded False upsert #82 ok, result: [] Quota exceeded False upsert #83 ok, result: [] Quota exceeded False upsert #84 ok, result: [] Quota exceeded False upsert #85 ok, result: [] Quota exceeded False upsert #86 ok, result: [] Quota exceeded False upsert #87 ok, result: [] Quota exceeded False upsert #88 ok, result: [] Quota exceeded False upsert #89 ok, result: [] Quota exceeded False upsert #90 ok, result: [] Quota exceeded False upsert #91 ok, result: [] Quota exceeded False upsert #92 ok, result: [] Quota exceeded False upsert #93 ok, result: [] Quota exceeded False upsert #94 ok, result: [] Quota exceeded False upsert #95 ok, result: [] Quota exceeded False upsert #96 ok, result: [] Quota exceeded False upsert #97 ok, result: [] Quota exceeded False upsert #98 ok, result: [] Quota exceeded False upsert #99 ok, result: [] Quota exceeded False upsert #100 ok, result: [] Quota exceeded False upsert #101 ok, result: [] Quota exceeded False upsert #102 ok, result: [] Quota exceeded False upsert #103 ok, result: [] Quota exceeded False upsert #104 ok, result: [] Quota exceeded False upsert #105 ok, result: [] Quota exceeded False upsert #106 ok, result: 
[] Quota exceeded False upsert #107 ok, result: [] Quota exceeded False upsert #108 ok, result: [] Quota exceeded False upsert #109 ok, result: [] Quota exceeded False upsert #110 ok, result: [] Quota exceeded False upsert #111 ok, result: [] Quota exceeded False upsert #112 ok, result: [] Quota exceeded False upsert #113 ok, result: [] Quota exceeded False upsert #114 ok, result: [] Quota exceeded False upsert #115 ok, result: [] Quota exceeded False upsert #116 ok, result: [] Quota exceeded False upsert #117 ok, result: [] Quota exceeded False upsert #118 ok, result: [] Quota exceeded False upsert #119 ok, result: [] Quota exceeded False upsert #120 ok, result: [] Quota exceeded False upsert #121 ok, result: [] Quota exceeded False upsert #122 ok, result: [] Quota exceeded False upsert #123 ok, result: [] Quota exceeded False upsert #124 ok, result: [] Quota exceeded False upsert #125 ok, result: [] Quota exceeded False upsert #126 ok, result: [] Quota exceeded False upsert #127 ok, result: [] Quota exceeded False upsert #128 ok, result: [] Quota exceeded False upsert #129 ok, result: [] Quota exceeded False upsert #130 ok, result: [] Quota exceeded False upsert #131 ok, result: [] Quota exceeded False upsert #132 ok, result: [] Quota exceeded False upsert #133 ok, result: [] Quota exceeded False upsert #134 ok, result: [] Quota exceeded False upsert #135 ok, result: [] Quota exceeded False upsert #136 ok, result: [] Quota exceeded False upsert #137 ok, result: [] Quota exceeded False upsert #138 ok, result: [] Quota exceeded False upsert #139 ok, result: [] Quota exceeded False upsert #140 ok, result: [] Quota exceeded False upsert #141 ok, result: [] Quota exceeded False upsert #142 ok, result: [] Quota exceeded False upsert #143 ok, result: [] Quota exceeded False upsert #144 ok, result: [] Quota exceeded False upsert #145 ok, result: [] Quota exceeded False upsert #146 ok, result: [] Quota exceeded False upsert #147 ok, result: [] Quota exceeded False upsert #148 ok, result: [] Quota exceeded False upsert #149 ok, result: [] Quota exceeded False upsert #150 ok, result: [] Quota exceeded False upsert #151 ok, result: [] Quota exceeded False upsert #152 ok, result: [] Quota exceeded False upsert #153 ok, result: [] Quota exceeded False upsert #154 ok, result: [] Quota exceeded False upsert #155 ok, result: [] Quota exceeded False upsert #156 ok, result: [] Quota exceeded False upsert #157 ok, result: [] Quota exceeded False upsert #158 ok, result: [] Quota exceeded False upsert #159 ok, result: [] Quota exceeded False upsert #160 ok, result: [] Quota exceeded False upsert #161 ok, result: [] Quota exceeded False upsert #162 ok, result: [] Quota exceeded False upsert #163 ok, result: [] Quota exceeded False upsert #164 ok, result: [] Quota exceeded False upsert #165 ok, result: [] Quota exceeded False upsert #166 ok, result: [] Quota exceeded False upsert #167 ok, result: [] Quota exceeded False upsert #168 ok, result: [] Quota exceeded False upsert #169 ok, result: [] Quota exceeded False upsert #170 ok, result: [] Quota exceeded False upsert #171 ok, result: [] Quota exceeded False upsert #172 ok, result: [] Quota exceeded False upsert #173 ok, result: [] Quota exceeded False upsert #174 ok, result: [] Quota exceeded False upsert #175 ok, result: [] Quota exceeded False upsert #176 ok, result: [] Quota exceeded False upsert #177 ok, result: [] Quota exceeded False upsert #178 ok, result: [] Quota exceeded False upsert #179 ok, result: [] Quota exceeded False upsert #180 ok, result: [] 
Quota exceeded False upsert #181 ok, result: [] Quota exceeded False upsert #182 ok, result: [] Quota exceeded False upsert #183 ok, result: [] Quota exceeded False upsert #184 ok, result: [] Quota exceeded False upsert #185 ok, result: [] Quota exceeded False upsert #186 ok, result: [] Quota exceeded False upsert #187 ok, result: [] Quota exceeded False upsert #188 ok, result: [] Quota exceeded False upsert #189 ok, result: [] Quota exceeded False upsert #190 ok, result: [] Quota exceeded False upsert #191 ok, result: [] Quota exceeded False upsert #192 ok, result: [] Quota exceeded False upsert #193 ok, result: [] Quota exceeded False upsert #194 ok, result: [] Quota exceeded False upsert #195 ok, result: [] Quota exceeded False upsert #196 ok, result: [] Quota exceeded False upsert #197 ok, result: [] Quota exceeded False upsert #198 ok, result: [] Quota exceeded False upsert #199 ok, result: [] Quota exceeded False upsert #200 ok, result: [] Quota exceeded False upsert #201 ok, result: [] Quota exceeded False upsert #202 ok, result: [] Quota exceeded False upsert #203 ok, result: [] Quota exceeded False upsert #204 ok, result: [] Quota exceeded False upsert #205 ok, result: [] Quota exceeded False upsert #206 ok, result: [] Quota exceeded False upsert #207 ok, result: [] Quota exceeded False upsert #208 ok, result: [] Quota exceeded False upsert #209 ok, result: [] Quota exceeded False upsert #210 ok, result: [] Quota exceeded False upsert #211 ok, result: [] Quota exceeded False upsert #212 ok, result: [] Quota exceeded False upsert #213 ok, result: [] Quota exceeded False upsert #214 ok, result: [] Quota exceeded False upsert #215 ok, result: [] Quota exceeded False upsert #216 ok, result: [] Quota exceeded False upsert #217 ok, result: [] Quota exceeded False upsert #218 ok, result: [] Quota exceeded False upsert #219 ok, result: [] Quota exceeded False upsert #220 ok, result: [] Quota exceeded False upsert #221 ok, result: [] Quota exceeded False upsert #222 ok, result: [] Quota exceeded False upsert #223 ok, result: [] Quota exceeded False upsert #224 ok, result: [] Quota exceeded False upsert #225 ok, result: [] Quota exceeded False upsert #226 ok, result: [] Quota exceeded False upsert #227 ok, result: [] Quota exceeded False upsert #228 ok, result: [] Quota exceeded False upsert #229 ok, result: [] Quota exceeded False upsert #230 ok, result: [] Quota exceeded False upsert #231 ok, result: [] Quota exceeded False upsert #232 ok, result: [] Quota exceeded False upsert #233 ok, result: [] Quota exceeded False upsert #234 ok, result: [] Quota exceeded False upsert #235 ok, result: [] Quota exceeded False upsert #236 ok, result: [] Quota exceeded False upsert #237 ok, result: [] Quota exceeded False upsert #238 ok, result: [] Quota exceeded False upsert #239 ok, result: [] Quota exceeded False upsert #240 ok, result: [] Quota exceeded False upsert #241 ok, result: [] Quota exceeded False upsert #242 ok, result: [] Quota exceeded False upsert #243 ok, result: [] Quota exceeded False upsert #244 ok, result: [] Quota exceeded False upsert #245 ok, result: [] Quota exceeded False upsert #246 ok, result: [] Quota exceeded False upsert #247 ok, result: [] Quota exceeded False upsert #248 ok, result: [] Quota exceeded False upsert: got overload issue |97.7%| [TA] $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/tests/olap/data_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test 2025-05-05 11:06:13,841 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 11:06:13,907 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 47556 168M 163M 113M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/s8wm/000051/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modul 48762 1004M 986M 742M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/ 51007 110M 104M 82.4M └─ moto_server s3 --port 22148 Test command err: library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.out.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback library/recipes/common/__init__.py:29: ResourceWarning: unclosed file <_io.TextIOWrapper name='/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff/moto_server.err.log' mode='w' encoding='utf-8'> process = subprocess.Popen( ResourceWarning: Enable tracemalloc to get the object allocation traceback File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 88, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/s8wm/000051', '--source-root', '/home/runner/.ya/build/build_root/s8wm/000051/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete', '--tb', 'short', '--modulo', '10', 
'--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/s8wm/000051', '--source-root', '/home/runner/.ya/build/build_root/s8wm/000051/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/s8wm/000051/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/chunk0/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteTtl::test_ttl_delete', '--tb', 'short', '--modulo', '10', '--modulo-index', '0', '--partition-mode', 'SEQUENTIAL', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) |98.8%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables] 2025-05-05 11:06:17,071 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-05-05 11:06:17,252 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 49807 2.8G 2.8G 2.7G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/s8wm/000021/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 69899 1.4G 1.4G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter Test command err: test_suffix, num 0, table path read_update_write_load start_time 1746442581.4292674 Path read_update_write_load removed Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 3314 30% 3314 50% 3314 90% 3314 99% 3314 ms Write: 10% 6312 30% 6312 50% 6312 90% 6312 99% 6312 ms Write: 10% 8696 30% 8696 50% 8696 90% 8696 99% 8696 ms Write: 10% 9095 30% 9095 50% 9095 90% 9095 99% 9095 ms Write: 10% 8366 30% 8366 50% 8366 90% 8366 99% 8366 ms Write: 10% 8878 30% 8878 50% 8878 90% 8878 99% 8878 ms Write: 10% 8385 30% 8385 50% 8385 90% 8385 99% 8385 ms Write: 10% 8798 30% 8798 50% 8798 90% 8798 99% 8798 ms Write: 10% 8764 30% 8764 50% 8764 90% 8764 99% 8764 ms Write: 10% 8629 30% 8629 50% 8629 90% 8629 99% 8629 ms Write: 10% 8686 30% 8686 50% 8686 90% 8686 99% 8686 ms Write: 10% 8612 30% 8612 50% 8612 90% 8612 99% 8612 ms Write: 10% 8508 30% 8508 50% 8508 90% 8508 99% 8508 ms Write: 10% 8530 30% 8530 50% 8530 90% 8530 99% 8530 ms Write: 10% 8482 30% 8482 50% 8482 90% 8482 99% 8482 ms Write: 10% 8392 30% 8392 50% 8392 90% 8392 99% 8392 ms Write: 10% 8427 30% 8427 50% 8427 90% 8427 99% 8427 ms Write: 10% 7922 30% 7922 50% 7922 90% 7922 99% 7922 ms Write: 10% 8034 30% 8034 50% 8034 90% 8034 99% 8034 ms Write: 10% 8104 30% 8104 50% 8104 90% 8104 99% 8104 ms Write: 10% 7450 30% 7450 50% 7450 90% 7450 99% 7450 ms Write: 10% 7073 30% 7073 50% 7073 90% 7073 99% 7073 ms Write: 10% 6854 30% 6854 50% 6854 90% 6854 99% 6854 ms Write: 10% 6994 30% 6994 50% 6994 90% 6994 99% 6994 ms Write: 10% 5769 30% 5769 50% 5769 90% 5769 99% 5769 ms Write: 10% 6835 30% 6835 50% 6835 90% 6835 99% 6835 ms Write: 10% 6858 30% 6858 50% 6858 90% 6858 99% 6858 ms Write: 10% 6312 30% 6312 50% 6312 90% 6312 99% 6312 ms Write: 10% 4429 30% 4429 50% 4429 90% 4429 99% 4429 ms Write: 10% 6136 30% 6136 50% 6136 90% 6136 99% 6136 ms Write: 10% 5571 30% 5571 50% 5571 90% 5571 99% 5571 ms Write: 10% 5362 30% 5362 50% 5362 90% 5362 99% 5362 ms Write: 10% 5922 30% 5922 50% 5922 90% 5922 99% 5922 ms Write: 10% 4553 30% 4553 50% 4553 90% 4553 99% 4553 ms Write: 10% 4385 30% 4385 50% 4385 90% 4385 99% 4385 ms Write: 10% 3863 30% 3863 50% 3863 90% 3863 99% 3863 ms Write: 10% 3594 30% 3594 50% 3594 90% 3594 99% 3594 ms Write: 10% 3634 30% 3634 50% 3634 90% 3634 99% 3634 ms Write: 10% 3719 30% 3719 50% 3719 90% 3719 99% 3719 ms Write: 10% 3213 30% 3213 50% 3213 90% 3213 99% 3213 ms Write: 10% 3302 30% 3302 50% 3302 90% 3302 99% 3302 ms Write: 10% 2862 30% 2862 50% 2862 90% 2862 99% 2862 ms Write: 10% 2828 30% 2828 50% 2828 90% 2828 99% 2828 ms Write: 10% 2409 30% 2409 50% 2409 90% 2409 99% 2409 ms Write: 10% 2943 30% 2943 50% 2943 90% 2943 99% 2943 ms Write: 10% 2846 30% 2846 50% 2846 90% 2846 99% 2846 ms Write: 10% 2177 30% 2177 50% 2177 90% 2177 99% 2177 ms Write: 10% 2385 30% 2385 50% 2385 90% 2385 99% 2385 ms Write: 10% 2597 30% 2597 50% 2597 90% 2597 99% 2597 ms Write: 10% 2489 30% 2489 50% 2489 90% 2489 99% 2489 ms Write: 10% 2165 30% 2165 50% 2165 90% 2165 99% 2165 ms Write: 10% 2068 30% 2068 50% 2068 90% 2068 99% 2068 ms Write: 10% 2036 30% 2036 50% 2036 90% 2036 99% 2036 ms 
Write: 10% 1972 30% 1972 50% 1972 90% 1972 99% 1972 ms Write: 10% 1968 30% 1968 50% 1968 90% 1968 99% 1968 ms Write: 10% 2072 30% 2072 50% 2072 90% 2072 99% 2072 ms Write: 10% 1865 30% 1865 50% 1865 90% 1865 99% 1865 ms Write: 10% 1957 30% 1957 50% 1957 90% 1957 99% 1957 ms Write: 10% 1825 30% 1825 50% 1825 90% 1825 99% 1825 ms Write: 10% 2348 30% 2348 50% 2348 90% 2348 99% 2348 ms Write: 10% 1688 30% 1688 50% 1688 90% 1688 99% 1688 ms Write: 10% 1569 30% 1569 50% 1569 90% 1569 99% 1569 ms Write: 10% 1832 30% 1832 50% 1832 90% 1832 99% 1832 ms Write: 10% 1867 30% 1867 50% 1867 90% 1867 99% 1867 ms Step 2. read write Write: 10% 8249 30% 8249 50% 8249 90% 8249 99% 8249 ms Write: 10% 8668 30% 8668 50% 8668 90% 8668 99% 8668 ms Write: 10% 8836 30% 8836 50% 8836 90% 8836 99% 8836 ms Write: 10% 8010 30% 8010 50% 8010 90% 8010 99% 8010 ms Write: 10% 7914 30% 7914 50% 7914 90% 7914 99% 7914 ms Write: 10% 8628 30% 8628 50% 8628 90% 8628 99% 8628 ms Write: 10% 9196 30% 9196 50% 9196 90% 9196 99% 9196 ms Write: 10% 8369 30% 8369 50% 8369 90% 8369 99% 8369 ms Write: 10% 8028 30% 8028 50% 8028 90% 8028 99% 8028 ms Write: 10% 7400 30% 7400 50% 7400 90% 7400 99% 7400 ms Write: 10% 7400 30% 7400 50% 7400 90% 7400 99% 7400 ms Write: 10% 6706 30% 6706 50% 6706 90% 6706 99% 6706 ms Write: 10% 6969 30% 6969 50% 6969 90% 6969 99% 6969 ms Write: 10% 5999 30% 5999 50% 5999 90% 5999 99% 5999 ms Write: 10% 6838 30% 6838 50% 6838 90% 6838 99% 6838 ms Write: 10% 6275 30% 6275 50% 6275 90% 6275 99% 6275 ms Write: 10% 6483 30% 6483 50% 6483 90% 6483 99% 6483 ms Write: 10% 6180 30% 6180 50% 6180 90% 6180 99% 6180 ms Write: 10% 6062 30% 6062 50% 6062 90% 6062 99% 6062 ms Write: 10% 6030 30% 6030 50% 6030 90% 6030 99% 6030 ms Write: 10% 5941 30% 5941 50% 5941 90% 5941 99% 5941 ms Write: 10% 5880 30% 5880 50% 5880 90% 5880 99% 5880 ms Write: 10% 5597 30% 5597 50% 5597 90% 5597 99% 5597 ms Write: 10% 5720 30% 5720 50% 5720 90% 5720 99% 5720 ms Write: 10% 5333 30% 5333 50% 5333 90% 5333 99% 5333 ms Write: 10% 5878 30% 5878 50% 5878 90% 5878 99% 5878 ms Write: 10% 5241 30% 5241 50% 5241 90% 5241 99% 5241 ms Write: 10% 5160 30% 5160 50% 5160 90% 5160 99% 5160 ms Write: 10% 5353 30% 5353 50% 5353 90% 5353 99% 5353 ms Write: 10% 4769 30% 4769 50% 4769 90% 4769 99% 4769 ms Write: 10% 4657 30% 4657 50% 4657 90% 4657 99% 4657 ms Write: 10% 3935 30% 3935 50% 3935 90% 3935 99% 3935 ms Write: 10% 4874 30% 4874 50% 4874 90% 4874 99% 4874 ms Write: 10% 3550 30% 3550 50% 3550 90% 3550 99% 3550 ms Write: 10% 3952 30% 3952 50% 3952 90% 3952 99% 3952 ms Write: 10% 3505 30% 3505 50% 3505 90% 3505 99% 3505 ms Write: 10% 3898 30% 3898 50% 3898 90% 3898 99% 3898 ms Write: 10% 4821 30% 4821 50% 4821 90% 4821 99% 4821 ms Write: 10% 3716 30% 3716 50% 3716 90% 3716 99% 3716 ms Write: 10% 3897 30% 3897 50% 3897 90% 3897 99% 3897 ms Write: 10% 3328 30% 3328 50% 3328 90% 3328 99% 3328 ms Write: 10% 2979 30% 2979 50% 2979 90% 2979 99% 2979 ms Write: 10% 3261 30% 3261 50% 3261 90% 3261 99% 3261 ms Write: 10% 3755 30% 3755 50% 3755 90% 3755 99% 3755 ms Write: 10% 2800 30% 2800 50% 2800 90% 2800 99% 2800 ms Write: 10% 2681 30% 2681 50% 2681 90% 2681 99% 2681 ms Write: 10% 4715 30% 4715 50% 4715 90% 4715 99% 4715 ms Write: 10% 2933 30% 2933 50% 2933 90% 2933 99% 2933 ms Write: 10% 2849 30% 2849 50% 2849 90% 2849 99% 2849 ms Write: 10% 2549 30% 2549 50% 2549 90% 2549 99% 2549 ms Write: 10% 3222 30% 3222 50% 3222 90% 3222 99% 3222 ms Write: 10% 2804 30% 2804 50% 2804 90% 2804 99% 2804 ms Write: 10% 2291 30% 2291 50% 2291 90% 2291 99% 2291 ms Write: 10% 2238 
30% 2238 50% 2238 90% 2238 99% 2238 ms Write: 10% 1928 30% 1928 50% 1928 90% 1928 99% 1928 ms Write: 10% 2608 30% 2608 50% 2608 90% 2608 99% 2608 ms Write: 10% 1888 30% 1888 50% 1888 90% 1888 99% 1888 ms Write: 10% 1923 30% 1923 50% 1923 90% 1923 99% 1923 ms Write: 10% 2325 30% 2325 50% 2325 90% 2325 99% 2325 ms Write: 10% 2545 30% 2545 50% 2545 90% 2545 99% 2545 ms Write: 10% 1885 30% 1885 50% 1885 90% 1885 99% 1885 ms Write: 10% 2161 30% 2161 50% 2161 90% 2161 99% 2161 ms Write: 10% 2256 30% 2256 50% 2256 90% 2256 99% 2256 ms Write: 10% 2369 30% 2369 50% 2369 90% 2369 99% 2369 ms Read: 10% 14299 30% 14299 50% 14299 90% 14299 99% 14299 ms Step 3. write modify Write: 10% 7709 30% 7709 50% 7709 90% 7709 99% 7709 ms Write: 10% 7691 30% 7691 50% 7691 90% 7691 99% 7691 ms Write: 10% 7965 30% 7965 50% 7965 90% 7965 99% 7965 ms Write: 10% 9500 30% 9500 50% 9500 90% 9500 99% 9500 ms Write: 10% 7970 30% 7970 50% 7970 90% 7970 99% 7970 ms Write: 10% 8373 30% 8373 50% 8373 90% 8373 99% 8373 ms Write: 10% 8858 30% 8858 50% 8858 90% 8858 99% 8858 ms Write: 10% 8393 30% 8393 50% 8393 90% 8393 99% 8393 ms Write: 10% 8571 30% 8571 50% 8571 90% 8571 99% 8571 ms Write: 10% 8747 30% 8747 50% 8747 90% 8747 99% 8747 ms Write: 10% 8445 30% 8445 50% 8445 90% 8445 99% 8445 ms Write: 10% 8546 30% 8546 50% 8546 90% 8546 99% 8546 ms Write: 10% 7892 30% 7892 50% 7892 90% 7892 99% 7892 ms Write: 10% 8304 30% 8304 50% 8304 90% 8304 99% 8304 ms Write: 10% 7723 30% 7723 50% 7723 90% 7723 99% 7723 ms Write: 10% 8218 30% 8218 50% 8218 90% 8218 99% 8218 ms Write: 10% 7720 30% 7720 50% 7720 90% 7720 99% 7720 ms Write: 10% 7117 30% 7117 50% 7117 90% 7117 99% 7117 ms Write: 10% 7318 30% 7318 50% 7318 90% 7318 99% 7318 ms Write: 10% 8217 30% 8217 50% 8217 90% 8217 99% 8217 ms Write: 10% 7446 30% 7446 50% 7446 90% 7446 99% 7446 ms Write: 10% 7319 30% 7319 50% 7319 90% 7319 99% 7319 ms Write: 10% 7184 30% 7184 50% 7184 90% 7184 99% 7184 ms Write: 10% 7308 30% 7308 50% 7308 90% 7308 99% 7308 ms Write: 10% 6865 30% 6865 50% 6865 90% 6865 99% 6865 ms Write: 10% 6413 30% 6413 50% 6413 90% 6413 99% 6413 ms Write: 10% 6403 30% 6403 50% 6403 90% 6403 99% 6403 ms Write: 10% 6814 30% 6814 50% 6814 90% 6814 99% 6814 ms Write: 10% 7050 30% 7050 50% 7050 90% 7050 99% 7050 ms Write: 10% 6474 30% 6474 50% 6474 90% 6474 99% 6474 ms Write: 10% 6439 30% 6439 50% 6439 90% 6439 99% 6439 ms Write: 10% 6058 30% 6058 50% 6058 90% 6058 99% 6058 ms Write: 10% 6368 30% 6368 50% 6368 90% 6368 99% 6368 ms Write: 10% 5945 30% 5945 50% 5945 90% 5945 99% 5945 ms Write: 10% 6060 30% 6060 50% 6060 90% 6060 99% 6060 ms Write: 10% 5452 30% 5452 50% 5452 90% 5452 99% 5452 ms Write: 10% 6031 30% 6031 50% 6031 90% 6031 99% 6031 ms Write: 10% 5414 30% 5414 50% 5414 90% 5414 99% 5414 ms Write: 10% 6120 30% 6120 50% 6120 90% 6120 99% 6120 ms Write: 10% 5155 30% 5155 50% 5155 90% 5155 99% 5155 ms Write: 10% 5172 30% 5172 50% 5172 90% 5172 99% 5172 ms Write: 10% 6726 30% 6726 50% 6726 90% 6726 99% 6726 ms Write: 10% 5001 30% 5001 50% 5001 90% 5001 99% 5001 ms Write: 10% 5693 30% 5693 50% 5693 90% 5693 99% 5693 ms Write: 10% 4359 30% 4359 50% 4359 90% 4359 99% 4359 ms Write: 10% 4485 30% 4485 50% 4485 90% 4485 99% 4485 ms Write: 10% 4684 30% 4684 50% 4684 90% 4684 99% 4684 ms Write: 10% 4303 30% 4303 50% 4303 90% 4303 99% 4303 ms Write: 10% 4376 30% 4376 50% 4376 90% 4376 99% 4376 ms Write: 10% 5046 30% 5046 50% 5046 90% 5046 99% 5046 ms Write: 10% 5124 30% 5124 50% 5124 90% 5124 99% 5124 ms Write: 10% 4102 30% 4102 50% 4102 90% 4102 99% 4102 ms Write: 10% 4252 30% 4252 
50% 4252 90% 4252 99% 4252 ms Write: 10% 4232 30% 4232 50% 4232 90% 4232 99% 4232 ms Write: 10% 4283 30% 4283 50% 4283 90% 4283 99% 4283 ms Write: 10% 4161 30% 4161 50% 4161 90% 4161 99% 4161 ms Write: 10% 4584 30% 4584 50% 4584 90% 4584 99% 4584 ms Write: 10% 4168 30% 4168 50% 4168 90% 4168 99% 4168 ms Write: 10% 3982 30% 3982 50% 3982 90% 3982 99% 3982 ms Write: 10% 4011 30% 4011 50% 4011 90% 4011 99% 4011 ms Write: 10% 4079 30% 4079 50% 4079 90% 4079 99% 4079 ms Write: 10% 3944 30% 3944 50% 3944 90% 3944 99% 3944 ms Write: 10% 4236 30% 4236 50% 4236 90% 4236 99% 4236 ms Write: 10% 4030 30% 4030 50% 4030 90% 4030 99% 4030 ms Update: 10% 1045 30% 1045 50% 1045 90% 1045 99% 1045 ms Step 4. read modify write Write: 10% 1371 30% 1371 50% 1371 90% 1371 99% 1371 ms Write: 10% 7805 30% 7805 50% 7805 90% 7805 99% 7805 ms Write: 10% 8406 30% 8406 50% 8406 90% 8406 99% 8406 ms Write: 10% 8427 30% 8427 50% 8427 90% 8427 99% 8427 ms Write: 10% 9544 30% 9544 50% 9544 90% 9544 99% 9544 ms Write: 10% 8371 30% 8371 50% 8371 90% 8371 99% 8371 ms Write: 10% 8192 30% 8192 50% 8192 90% 8192 99% 8192 ms Write: 10% 8302 30% 8302 50% 8302 90% 8302 99% 8302 ms Write: 10% 7955 30% 7955 50% 7955 90% 7955 99% 7955 ms Write: 10% 8119 30% 8119 50% 8119 90% 8119 99% 8119 ms Write: 10% 7940 30% 7940 50% 7940 90% 7940 99% 7940 ms Write: 10% 8695 30% 8695 50% 8695 90% 8695 99% 8695 ms Write: 10% 7950 30% 7950 50% 7950 90% 7950 99% 7950 ms Write: 10% 8040 30% 8040 50% 8040 90% 8040 99% 8040 ms Write: 10% 7562 30% 7562 50% 7562 90% 7562 99% 7562 ms Write: 10% 7416 30% 7416 50% 7416 90% 7416 99% 7416 ms Write: 10% 7484 30% 7484 50% 7484 90% 7484 99% 7484 ms Write: 10% 7468 30% 7468 50% 7468 90% 7468 99% 7468 ms Write: 10% 7413 30% 7413 50% 7413 90% 7413 99% 7413 ms Write: 10% 7220 30% 7220 50% 7220 90 ... 
, line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f4fa72f9640 (most recent call first): File "contrib/tools/python3/Lib/logging/__init__.py", line 973 in acquire File "contrib/tools/python3/Lib/logging/__init__.py", line 1026 in handle File "contrib/tools/python3/Lib/logging/__init__.py", line 1762 in callHandlers File "contrib/tools/python3/Lib/logging/__init__.py", line 1700 in handle File "contrib/tools/python3/Lib/logging/__init__.py", line 1684 in _log File "contrib/tools/python3/Lib/logging/__init__.py", line 1527 in debug File "contrib/python/ydb/py3/ydb/resolver.py", line 175 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f4fa6af8640 (most recent call first): File "contrib/tools/python3/Lib/logging/__init__.py", line 973 in acquire File "contrib/tools/python3/Lib/logging/__init__.py", line 1026 in handle File "contrib/tools/python3/Lib/logging/__init__.py", line 1762 in callHandlers File "contrib/tools/python3/Lib/logging/__init__.py", line 1700 in handle File "contrib/tools/python3/Lib/logging/__init__.py", line 1684 in _log File "contrib/tools/python3/Lib/logging/__init__.py", line 1527 in debug File "contrib/python/ydb/py3/ydb/resolver.py", line 175 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f4fa8afc640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f4fa92fd640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f4fb26fc640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", 
line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f500a3ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f501e7ff640 (most recent call first): File "contrib/tools/python3/Lib/logging/__init__.py", line 973 in acquire File "contrib/tools/python3/Lib/logging/__init__.py", line 1026 in handle File "contrib/tools/python3/Lib/logging/__init__.py", line 1762 in callHandlers File "contrib/tools/python3/Lib/logging/__init__.py", line 1700 in handle File "contrib/tools/python3/Lib/logging/__init__.py", line 1684 in _log File "contrib/tools/python3/Lib/logging/__init__.py", line 1527 in debug File "contrib/python/ydb/py3/ydb/resolver.py", line 175 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f50345ff640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 99 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 151 in result File "contrib/python/ydb/py3/ydb/connection.py", line 476 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f5050284440 (most recent call first): File "contrib/tools/python3/Lib/posixpath.py", line 462 in _joinrealpath File "contrib/tools/python3/Lib/posixpath.py", line 427 in realpath File "contrib/tools/python3/Lib/inspect.py", line 1016 in getmodule File "contrib/tools/python3/Lib/inspect.py", line 1090 in findsource File "contrib/python/pytest/py3/_pytest/_code/source.py", line 121 in findsource File "contrib/python/pytest/py3/_pytest/_code/code.py", line 108 in fullsource File "contrib/python/pytest/py3/_pytest/_code/code.py", line 252 in getsource File "contrib/python/pytest/py3/_pytest/_code/code.py", line 754 in _getentrysource File "contrib/python/pytest/py3/_pytest/_code/code.py", line 852 in repr_traceback_entry File "contrib/python/pytest/py3/_pytest/_code/code.py", line 914 in repr_traceback File 
"contrib/python/pytest/py3/_pytest/_code/code.py", line 989 in repr_excinfo File "contrib/python/pytest/py3/_pytest/_code/code.py", line 701 in getrepr File "contrib/python/pytest/py3/_pytest/terminal.py", line 891 in pytest_keyboard_interrupt File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 285 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/s8wm/000021/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/s8wm/000021', '--source-root', '/home/runner/.ya/build/build_root/s8wm/000021/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1738, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/s8wm/000021/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', 
'/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/s8wm/000021', '--source-root', '/home/runner/.ya/build/build_root/s8wm/000021/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/s8wm/000021/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8580453620/test_tool', '--ya-version', '2', '--collect-cores', '--build-type', 'relwithdebinfo', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no']' stopped by 600 seconds timeout",), {}) 2025-05-05 11:06:47,787 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-05-05 11:06:47,787 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.6%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT ydb/library/yaml_config/ut_transform [size:medium] ------ sole chunk ran 1 test (total:3.64s - test:3.13s canon:0.32s) [fail] test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [default-linux-x86_64-relwithdebinfo] (1.19s) Test results differ from canonical: test_result[3]: files content differs: 'ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff' has finished unexpectedly with rc = 1 stdout: stderr: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff/test_transform.py.TestYamlConfigTransformations.test_basic.args1-dump_ds_init.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yaml_config/ut_transform/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/library/yaml_config/ut_transform ydb/tests/functional/hive [size:medium] nchunks:20 ------ [test_drain.py 0/20] chunk ran 1 test (total:17.67s - test:17.60s) [fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-relwithdebinfo] (15.59s) ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E ############################## E 0 seconds passed, 21 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224038620: 4) (72075186224038636: 5) (72075186224038667: 4) (72075186224038740: 4) (72075186224038747: 6) (72075186224038755: 5) (72075186224038795: 6) (72075186224038819: 6) (72075186224038843: 4) (72075186224038965: 5). 
Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/functional/hive ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:107.11s - setup:0.01s test:106.68s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (103.92s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. 
Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:97.28s - setup:0.03s test:96.74s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (94.06s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/functional/serverless ydb/tests/functional/tenants [size:medium] nchunks:20 ------ [0/20] chunk ran 1 test (total:15.63s - test:15.58s) [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (12.33s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query 
failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [1/20] chunk ran 1 test (total:15.12s - setup:0.01s test:15.06s) [fail] test_dynamic_tenants.py::test_create_and_drop_the_same_tenant2[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (12.25s) ydb/tests/functional/tenants/test_dynamic_tenants.py:350: in test_create_and_drop_the_same_tenant2 pool.retry_operation_sync(write_some_data, None, "table", "table_for_rm", value) contrib/python/ydb/py3/ydb/table.py:2632: in retry_operation_sync return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/table.py:2630: in wrapped_callee return callee(session, *args, **kwargs) ydb/tests/functional/tenants/test_dynamic_tenants.py:340: in write_some_data session.transaction().execute( contrib/python/ydb/py3/ydb/table.py:2369: in execute return self._driver( contrib/python/ydb/py3/ydb/tracing.py:70: in wrapper return f(self, *args, **kwargs) contrib/python/ydb/py3/ydb/pool.py:443: in __call__ res = connection( contrib/python/ydb/py3/ydb/connection.py:465: in __call__ return response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:229: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.SchemeError: message: "Failed to create default pool in database /Root/users/database" severity: 1 issues { message: "Failed to create resource pool: StatusPathDoesNotExist" severity: 1 issues { message: "Path does not exist" issue_code: 200200 severity: 1 } } ,message: "Query failed during adding/waiting in workload pool " severity: 1 (server_code: 400070) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_dynamic_tenants.py.test_create_and_drop_the_same_tenant2.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [2/20] chunk ran 1 test (total:16.55s - setup:0.02s test:16.49s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (14.04s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E 
AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [3/20] chunk ran 1 test (total:16.58s - setup:0.03s test:16.50s) [fail] test_tenants.py::TestTenants::test_list_database_above[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (14.13s) ydb/tests/functional/tenants/test_tenants.py:433: in test_list_database_above assert result.children[0].name == ".sys" E AssertionError: assert '.metadata' == '.sys' E - .sys E + .metadata Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_list_database_above.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [4/20] chunk ran 1 test (total:76.42s - test:76.36s) [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--false] [default-linux-x86_64-relwithdebinfo] (73.56s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ [5/20] chunk ran 1 test (total:76.77s - test:76.71s) [fail] test_tenants.py::TestTenants::test_stop_start[enable_alter_database_create_hive_first--true] [default-linux-x86_64-relwithdebinfo] (73.50s) ydb/tests/functional/tenants/test_tenants.py:297: in test_stop_start with ydb_database_ctx(ydb_cluster, database_path): contrib/tools/python3/Lib/contextlib.py:137: in __enter__ return next(self.gen) ydb/tests/library/fixtures/__init__.py:88: in ydb_database_ctx ydb_cluster.create_database(database_path, storage_pool_units_count=storage_pools, timeout_seconds=timeout_seconds, token=token) ydb/tests/library/harness/kikimr_cluster_interface.py:217: in create_database raise RuntimeError('create_database failed: %s, %s' % (operation.status, ydb.issues._format_issues(operation.issues))) E RuntimeError: create_database failed: 400080, message: "Group fit error BoxId# 1 StoragePoolId# 5 Error# failed to allocate group: no group options PDisks# {[(1:1-S)]}" severity: 1 Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff/test_tenants.py.TestTenants.test_stop_start.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tenants/test-results/py3test/testing_out_stuff ------ FAIL: 6 - FAIL ydb/tests/functional/tenants ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 18 tests (total:632.43s - test:600.09s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 173.66s test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (good) duration: 118.04s test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] (good) duration: 112.54s test_alter_tiering.py::TestAlterTiering::test[many_tables] (timeout) duration: 108.73s test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 73.75s test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 16.89s test_simple.py::TestSimple::test[tablestores] (good) duration: 12.45s test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] (good) duration: 2.25s test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] (good) duration: 2.17s test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] (good) duration: 2.13s 8 more tests with 5.85s total duration are not listed. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-relwithdebinfo] (108.73s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 17 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:10 ------ [0/10] chunk ran 1 test (total:609.18s - test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 607.47s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-relwithdebinfo] (607.47s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 1 - TIMEOUT ydb/tests/olap/ttl_tiering ------ sole chunk ran 1 test (total:235.95s - test:235.50s) Info: Test run has exceeded 32.0G (33554432K) memory limit with 75.7G (79387368K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 49613 44.8M 44.8M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 49805 32.8M 20.8M 8.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 49810 255M 258M 205M └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fac 51331 8.5G 8.5G 8.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51355 8.2G 8.2G 8.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51378 8.2G 8.2G 8.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51400 8.4G 8.4G 8.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51418 8.2G 8.2G 8.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51422 8.2G 8.2G 8.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51424 9.2G 9.2G 9.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51506 8.3G 8.2G 8.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o 51511 8.2G 8.2G 8.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_o Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr ------ sole chunk ran 2 tests (total:242.61s - setup:0.01s test:240.24s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.0G (11515748K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 49600 44.8M 44.8M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 49797 32.5M 20.9M 8.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 49803 1.3G 1.3G 1.3G └─ ydb-tests-stress-simple_queue-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fact 51258 1.2G 1.2G 998M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51270 1.0G 1.0G 824M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51272 1.0G 1.0G 819M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51273 1.2G 1.2G 991M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51274 1.0G 1.0G 808M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51275 1.0G 1.0G 842M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51276 1.1G 1.1G 847M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51277 1.0G 1.0G 804M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou 51284 1.0G 1.0G 812M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_ou Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/simple_queue/tests/test-results/py3test/testing_out_stuff/stderr Total 10 suites: 4 - GOOD 4 - FAIL 2 - TIMEOUT Total 37 tests: 25 - GOOD 10 - FAIL 2 - TIMEOUT Cache efficiency ratio is 99.61% (35507 of 35647). Local: 40 (0.11%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 35467 (99.50%) Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s Disk usage for tools/sdk 3.64 GiB Additional disk space consumed for build cache 0 bytes Critical path: [632868 ms] [TM] [rnd-yxrb31flh5s4sfod default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 0 (1746442576579), finished: 632868 (1746443209447)] Time from start: 652753.9140625 ms, time elapsed by graph 632868 ms, time diff 19885.9140625 ms. 
The longest 10 tasks:
[632868 ms] [TM] [rnd-yxrb31flh5s4sfod default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/scenario/py3test [started: 1746442576579, finished: 1746443209447]
[609638 ms] [TM] [rnd-16178028703505630034 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746442573391, finished: 1746443183029]
[541877 ms] [TM] [rnd-3466550287544265583 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746442573231, finished: 1746443115108]
[327222 ms] [TM] [rnd-10070386375820369426 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746442573171, finished: 1746442900393]
[243062 ms] [TM] [rnd-blgywwl52158jpmg default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/simple_queue/tests/py3test [started: 1746442576547, finished: 1746442819609]
[236357 ms] [TM] [rnd-gm4asdtevhtl6b57 default-linux-x86_64 relwithdebinfo]: ydb/tests/stress/olap_workload/tests/py3test [started: 1746442576640, finished: 1746442812997]
[165845 ms] [TM] [rnd-4977312343813472984 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/data_quotas/py3test [started: 1746442573210, finished: 1746442739055]
[156537 ms] [TM] [rnd-452254577824717203 default-linux-x86_64 relwithdebinfo]: ydb/tests/olap/ttl_tiering/py3test [started: 1746442573459, finished: 1746442729996]
[107612 ms] [TM] [rnd-16370575650294575106 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/serverless/py3test [started: 1746442576257, finished: 1746442683869]
[ 97838 ms] [TM] [rnd-800358239491359913 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/serverless/py3test [started: 1746442575632, finished: 1746442673470]
Total time by type:
[3632918 ms] [TM] [count: 134, ave time 27111.33 msec]
[ 10144 ms] [prepare:get from local cache] [count: 40, ave time 253.60 msec]
[ 5124 ms] [TA] [count: 6, ave time 854.00 msec]
[ 4279 ms] [prepare:bazel-store] [count: 1, ave time 4279.00 msec]
[ 1616 ms] [prepare:AC] [count: 2, ave time 808.00 msec]
[ 1570 ms] [prepare:tools] [count: 16, ave time 98.12 msec]
[ 506 ms] [prepare:put to dist cache] [count: 25, ave time 20.24 msec]
[ 64 ms] [prepare:clean] [count: 3, ave time 21.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 3638042 ms (100.00%)
Total run tasks time - 3638042 ms
Configure time - 23.3 s
Statistics overhead 868 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
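Editor's note on the two ydb/tests/functional/serverless failures reported above: pytest.raises(..., match=...) runs re.search against the string form of the raised exception, and the literal token DISK_SPACE_EXHAUSTED never occurs in the message the server actually returned ("Disk space exhausted ... issue_code: 2033 ... database is out of disk space"), which is why the report shows "Regex pattern did not match". The sketch below reproduces the mismatch with a hypothetical stand-in exception class; it is an illustration under those assumptions, not the repository's test code.

import pytest


class Unavailable(Exception):
    """Hypothetical stand-in for ydb.issues.Unavailable; only the message text matters here."""


MESSAGE = (
    'message: "Disk space exhausted. Table `/Root/quoted_serverless/.../table`." '
    'issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: '
    'database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
)


def write_key():
    # Stand-in for the write that the test expects to be rejected.
    raise Unavailable(MESSAGE)


@pytest.mark.xfail(reason="DISK_SPACE_EXHAUSTED does not occur in the message", strict=True)
def test_pattern_from_the_failing_test():
    # match= is re.search'ed against str(exc); this is the pattern the log shows failing.
    with pytest.raises(Unavailable, match=r".*DISK_SPACE_EXHAUSTED.*"):
        write_key()


def test_pattern_keyed_to_the_returned_wording():
    # A pattern keyed to the wording (or issue_code) the server actually reports matches.
    with pytest.raises(Unavailable, match=r"Disk space exhausted|out of disk space"):
        write_key()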
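Editor's note on the ydb/tests/olap/scenario chunk above: it exceeded its 600 s budget, "failed to shutdown gracefully in 30s and was terminated using SIGQUIT", and the "Thread 0x... (most recent call first)" listings are Python faulthandler dumps emitted on that signal. Below is a minimal sketch of those two mechanisms; it conflates the parent (test tool) and the child (pytest runner) into one script for brevity, uses a sleep process as a stand-in, and is not the actual devtools/ya implementation.

import faulthandler
import signal
import subprocess

# Child side: arm faulthandler so SIGQUIT prints every thread's Python stack --
# the source of the "Thread 0x... (most recent call first)" blocks in the log.
faulthandler.register(signal.SIGQUIT, all_threads=True)

# Parent side: give the chunk a wall-clock budget and an escalation path.
proc = subprocess.Popen(["sleep", "1000"])   # stand-in for the pytest chunk
try:
    proc.wait(timeout=600)                   # the 600 s chunk timeout seen in the log
except subprocess.TimeoutExpired:
    proc.send_signal(signal.SIGQUIT)         # request thread dumps / shutdown
    try:
        proc.wait(timeout=30)                # the 30 s grace period mentioned above
    except subprocess.TimeoutExpired:
        proc.kill()                          # terminate if it still has not exited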
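Editor's note on the test_tenants.py::TestTenants::test_list_database_above failures above: the assertion pins ".sys" to index 0 of the listing and fails because ".metadata" is returned first. If the intent is only that the database root contains .sys, an order-insensitive check avoids depending on listing order. A sketch with a hypothetical stand-in for the scheme listing object:

from types import SimpleNamespace

# Hypothetical object shaped like the listing the failing test inspects.
result = SimpleNamespace(children=[SimpleNamespace(name=".metadata"),
                                   SimpleNamespace(name=".sys")])

# Index-based check (what failed): result.children[0].name == ".sys"
# Order-insensitive check: assert the expected system directory is present at all.
names = {child.name for child in result.children}
assert ".sys" in names, f".sys not found among {sorted(names)}"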