Created
June 6, 2025 04:39
-
-
Save vanbasten23/6772e44bc8b562256c3b184fb403c2b5 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
ERROR 06-06 04:39:02 [core.py:515] EngineCore failed to start.
ERROR 06-06 04:39:02 [core.py:515] Traceback (most recent call last):
ERROR 06-06 04:39:02 [core.py:515]   File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 506, in run_engine_core
ERROR 06-06 04:39:02 [core.py:515]     engine_core = EngineCoreProc(*args, **kwargs)
ERROR 06-06 04:39:02 [core.py:515]   File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 390, in __init__
ERROR 06-06 04:39:02 [core.py:515]     super().__init__(vllm_config, executor_class, log_stats,
ERROR 06-06 04:39:02 [core.py:515]   File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 76, in __init__
ERROR 06-06 04:39:02 [core.py:515]     self.model_executor = executor_class(vllm_config)
ERROR 06-06 04:39:02 [core.py:515]   File "/home/xiowei/vllm/vllm/executor/executor_base.py", line 53, in __init__
ERROR 06-06 04:39:02 [core.py:515]     self._init_executor()
ERROR 06-06 04:39:02 [core.py:515]   File "/home/xiowei/vllm/vllm/v1/executor/multiproc_executor.py", line 94, in _init_executor
ERROR 06-06 04:39:02 [core.py:515]     self.workers = WorkerProc.wait_for_ready(unready_workers)
ERROR 06-06 04:39:02 [core.py:515]   File "/home/xiowei/vllm/vllm/v1/executor/multiproc_executor.py", line 423, in wait_for_ready
ERROR 06-06 04:39:02 [core.py:515]     raise e from None
ERROR 06-06 04:39:02 [core.py:515] Exception: WorkerProc initialization failed due to an exception in a background process. See stack trace for root cause.
Process EngineCore_0:
Traceback (most recent call last):
  File "/home/xiowei/miniconda3/envs/vllm/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/home/xiowei/miniconda3/envs/vllm/lib/python3.10/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 519, in run_engine_core
    raise e
  File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 506, in run_engine_core
    engine_core = EngineCoreProc(*args, **kwargs)
  File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 390, in __init__
    super().__init__(vllm_config, executor_class, log_stats,
  File "/home/xiowei/vllm/vllm/v1/engine/core.py", line 76, in __init__
    self.model_executor = executor_class(vllm_config)
  File "/home/xiowei/vllm/vllm/executor/executor_base.py", line 53, in __init__
    self._init_executor()
  File "/home/xiowei/vllm/vllm/v1/executor/multiproc_executor.py", line 94, in _init_executor
    self.workers = WorkerProc.wait_for_ready(unready_workers)
  File "/home/xiowei/vllm/vllm/v1/executor/multiproc_executor.py", line 423, in wait_for_ready
    raise e from None
Exception: WorkerProc initialization failed due to an exception in a background process. See stack trace for root cause.
Traceback (most recent call last):
  File "/home/xiowei/miniconda3/envs/vllm/bin/vllm", line 8, in <module>
    sys.exit(main())
  File "/home/xiowei/vllm/vllm/entrypoints/cli/main.py", line 59, in main
    args.dispatch_function(args)
  File "/home/xiowei/vllm/vllm/entrypoints/cli/serve.py", line 58, in cmd
    uvloop.run(run_server(args))
  File "/home/xiowei/miniconda3/envs/vllm/lib/python3.10/site-packages/uvloop/__init__.py", line 82, in run
    return loop.run_until_complete(wrapper())
  File "uvloop/loop.pyx", line 1518, in uvloop.loop.Loop.run_until_complete
  File "/home/xiowei/miniconda3/envs/vllm/lib/python3.10/site-packages/uvloop/__init__.py", line 61, in wrapper
    return await main
  File "/home/xiowei/vllm/vllm/entrypoints/openai/api_server.py", line 1325, in run_server
    await run_server_worker(listen_address, sock, args, **uvicorn_kwargs)
  File "/home/xiowei/vllm/vllm/entrypoints/openai/api_server.py", line 1345, in run_server_worker
    async with build_async_engine_client(args, client_config) as engine_client:
  File "/home/xiowei/miniconda3/envs/vllm/lib/python3.10/contextlib.py", line 199, in __aenter__
    return await anext(self.gen)
  File "/home/xiowei/vllm/vllm/entrypoints/openai/api_server.py", line 157, in build_async_engine_client
    async with build_async_engine_client_from_engine_args(
  File "/home/xiowei/miniconda3/envs/vllm/lib/python3.10/contextlib.py", line 199, in __aenter__
    return await anext(self.gen)
  File "/home/xiowei/vllm/vllm/entrypoints/openai/api_server.py", line 193, in build_async_engine_client_from_engine_args
    async_llm = AsyncLLM.from_vllm_config(
  File "/home/xiowei/vllm/vllm/v1/engine/async_llm.py", line 162, in from_vllm_config
    return cls(
  File "/home/xiowei/vllm/vllm/v1/engine/async_llm.py", line 124, in __init__
    self.engine_core = EngineCoreClient.make_async_mp_client(
  File "/home/xiowei/vllm/vllm/v1/engine/core_client.py", line 93, in make_async_mp_client
    return AsyncMPClient(vllm_config, executor_class, log_stats,
  File "/home/xiowei/vllm/vllm/v1/engine/core_client.py", line 716, in __init__
    super().__init__(
  File "/home/xiowei/vllm/vllm/v1/engine/core_client.py", line 422, in __init__
    self._init_engines_direct(vllm_config, local_only,
  File "/home/xiowei/vllm/vllm/v1/engine/core_client.py", line 491, in _init_engines_direct
    self._wait_for_engine_startup(handshake_socket, input_address,
  File "/home/xiowei/vllm/vllm/v1/engine/core_client.py", line 511, in _wait_for_engine_startup
    wait_for_engine_startup(
  File "/home/xiowei/vllm/vllm/v1/utils.py", line 494, in wait_for_engine_startup
    raise RuntimeError("Engine core initialization failed. "
RuntimeError: Engine core initialization failed. See root cause above. Failed core proc(s): {}
/home/xiowei/miniconda3/envs/vllm/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 8 leaked semaphore objects to clean up at shutdown
  warnings.warn('resource_tracker: There appear to be %d '
/home/xiowei/miniconda3/envs/vllm/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 9 leaked shared_memory objects to clean up at shutdown
  warnings.warn('resource_tracker: There appear to be %d '
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment