@@ -26,7 +26,7 @@ def is_quay_image(url: str) -> bool:
2626 "TEST_DATA_FILE" , "tests/e2e/vLLM/configs/int8_dynamic_per_token.yaml"
2727)
2828SKIP_HF_UPLOAD = os .environ .get ("SKIP_HF_UPLOAD" , "" )
29- # vllm environment: image url, deployed runner name, same (default), or the path of vllm virtualenv
29+ # vllm environment: same (default), the path of vllm virtualenv, image url, deployed runner name
3030VLLM_PYTHON_ENV = os .environ .get ("VLLM_PYTHON_ENV" , "same" )
3131IS_VLLM_IMAGE = False
3232IS_VLLM_IMAGE_DEPLOYED = False
@@ -231,6 +231,7 @@ def _run_vllm(self, logger):
231231 logger .info (self .vllm_env )
232232
233233 if IS_VLLM_IMAGE :
234+ # generate python command to run in the vllm image
234235 run_file_path = os .path .join (RUN_SAVE_DIR , "run_vllm.py" )
235236 shutil .copy (os .path .join (test_file_dir , "run_vllm.py" ),
236237 os .path .join (RUN_SAVE_DIR , "run_vllm.py" ))
@@ -257,7 +258,7 @@ def _run_vllm(self, logger):
257258 stderr = subprocess .PIPE ,
258259 text = True )
259260 else :
260- logger .info ("vllm image is pulled. Run vllm cmd with podman." )
261+ logger .info ("vllm image is pulled locally . Run vllm cmd with podman." )
261262 result = subprocess .Popen (
262263 [
263264 "podman" , "run" , "--rm" ,