Hello, I was running inference on Ubuntu 22.04 with Python 3.12 and torch 2.8.0, and the following error was reported. How should I handle it? Thank you.
Traceback (most recent call last):
File "/root/miniconda3/lib/python3.12/site-packages/transformers/image_processing_base.py", line 354, in get_image_processor_dict
resolved_image_processor_file = resolved_image_processor_files[0]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^
IndexError: list index out of range
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/root/miniconda3/lib/python3.12/multiprocessing/process.py", line 314, in _bootstrap
self.run()
File "/root/miniconda3/lib/python3.12/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/infer/deploy.py", line 255, in _deploy_main
return deploy_main(args)
^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/infer/deploy.py", line 240, in deploy_main
SwiftDeploy(args).main()
^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/infer/deploy.py", line 54, in __init__
super().__init__(args)
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/infer/infer.py", line 33, in __init__
model, self.template = prepare_model_template(args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/infer/utils.py", line 146, in prepare_model_template
model, processor = args.get_model_processor(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/argument/base_args/base_args.py", line 323, in get_model_processor
return get_model_tokenizer(**res)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/model/register.py", line 758, in get_model_tokenizer
model, processor = get_function(model_dir, model_info, model_kwargs, load_model, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/model/model/qwen.py", line 846, in get_model_tokenizer_qwen2_5_vl
return get_model_tokenizer_qwen2_vl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/model/model/qwen.py", line 760, in get_model_tokenizer_qwen2_vl
model, tokenizer = get_model_tokenizer_multimodal(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/model/register.py", line 441, in get_model_tokenizer_multimodal
processor = AutoProcessor.from_pretrained(model_dir, trust_remote_code=True)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/transformers/models/auto/processing_auto.py", line 396, in from_pretrained
return processor_class.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/transformers/processing_utils.py", line 1394, in from_pretrained
args = cls._get_arguments_from_pretrained(pretrained_model_name_or_path, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/transformers/processing_utils.py", line 1453, in _get_arguments_from_pretrained
args.append(attribute_class.from_pretrained(pretrained_model_name_or_path, **kwargs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/transformers/models/auto/image_processing_auto.py", line 494, in from_pretrained
raise initial_exception
File "/root/miniconda3/lib/python3.12/site-packages/transformers/models/auto/image_processing_auto.py", line 476, in from_pretrained
config_dict, _ = ImageProcessingMixin.get_image_processor_dict(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/transformers/image_processing_base.py", line 361, in get_image_processor_dict
raise OSError(
OSError: Can't load image processor for '/root/models'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure '/root/models' is the correct path to a directory containing a preprocessor_config.json file
^C[INFO:swift] The deployment process has been terminated.
Traceback (most recent call last):
File "/root/miniconda3/lib/python3.12/site-packages/swift/cli/app.py", line 4, in <module>
app_main()
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/app/app.py", line 44, in app_main
return SwiftApp(args).main()
^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/base.py", line 49, in main
result = self.run()
^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/app/app.py", line 24, in run
with deploy_context as base_url:
File "/root/miniconda3/lib/python3.12/contextlib.py", line 137, in __enter__
return next(self.gen)
^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/llm/infer/deploy.py", line 275, in run_deploy
time.sleep(1)
KeyboardInterrupt
Traceback (most recent call last):
File "/root/miniconda3/bin/swift", line 8, in <module>
sys.exit(cli_main())
^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/site-packages/swift/cli/main.py", line 110, in cli_main
result = subprocess.run(args)
^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/subprocess.py", line 550, in run
stdout, stderr = process.communicate(input, timeout=timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/subprocess.py", line 1201, in communicate
self.wait()
File "/root/miniconda3/lib/python3.12/subprocess.py", line 1264, in wait
return self._wait(timeout=timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/subprocess.py", line 2053, in _wait
(pid, sts) = self._try_wait(0)
^^^^^^^^^^^^^^^^^
File "/root/miniconda3/lib/python3.12/subprocess.py", line 2011, in _try_wait
(pid, sts) = os.waitpid(self.pid, wait_flags)
Hello, I was running inference on Ubuntu 22.04 with Python 3.12 and torch 2.8.0, and the error above was reported (the root cause appears to be the missing preprocessor_config.json in '/root/models'). How should I handle it? Thank you.