Mirror of https://github.com/m-bain/whisperX.git, synced 2025-07-01 18:17:27 -04:00.
feat: add local_files_only option on whisperx.load_model for offline mode (#867)
Adds the parameter local_files_only (default False for consistency) to whisperx.load_model so that the user can avoid downloading the file and return the path to the local cached file if it exists. --------- Co-authored-by: Barabazs <31799121+Barabazs@users.noreply.github.com>
This commit is contained in:
@@ -269,6 +269,7 @@ def load_model(whisper_arch,
                model : Optional[WhisperModel] = None,
                task="transcribe",
                download_root=None,
+               local_files_only=False,
                threads=4):
     '''Load a Whisper model for inference.
     Args:
@@ -279,6 +280,7 @@ def load_model(whisper_arch,
         language: str - The language of the model. (use English for now)
         model: Optional[WhisperModel] - The WhisperModel instance to use.
         download_root: Optional[str] - The root directory to download the model to.
+        local_files_only: bool - If `True`, avoid downloading the file and return the path to the local cached file if it exists.
         threads: int - The number of cpu threads to use per worker, e.g. will be multiplied by num workers.
     Returns:
         A Whisper pipeline.
@@ -292,6 +294,7 @@ def load_model(whisper_arch,
                          device_index=device_index,
                          compute_type=compute_type,
                          download_root=download_root,
+                         local_files_only=local_files_only,
                          cpu_threads=threads)
     if language is not None:
         tokenizer = faster_whisper.tokenizer.Tokenizer(model.hf_tokenizer, model.model.is_multilingual, task=task, language=language)
|
Reference in New Issue
Block a user