Using HuggingFace Packages Offline
To download a single file from the Hub ahead of time, install huggingface_hub and call hf_hub_download, pointing cache_dir at a local directory:

$ pip install huggingface_hub

from huggingface_hub import hf_hub_download

hf_hub_download(repo_id="bigscience/T0_3B", filename="config.json", cache_dir="./your/path/bigscience_t0")
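On the offline machine, the same call can be made with local_files_only=True so that huggingface_hub resolves the file from the local cache instead of contacting the Hub. A minimal sketch, assuming the cache directory from above; the final AutoConfig load is an illustrative assumption, not part of the original example:

from huggingface_hub import hf_hub_download
from transformers import AutoConfig

# Resolve the previously downloaded file from the local cache only;
# with local_files_only=True this raises an error rather than going online.
config_path = hf_hub_download(
    repo_id="bigscience/T0_3B",
    filename="config.json",
    cache_dir="./your/path/bigscience_t0",
    local_files_only=True,
)

# Load the configuration directly from the cached JSON file.
config = AutoConfig.from_pretrained(config_path)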
To make a whole model available offline, download it once with transformers, save it to a local directory, and reload it from that directory later:

from transformers import AutoConfig, AutoModel, AutoTokenizer

model_name = r'facebook/muppet-roberta-base'
save_dir = r'./your/path/muppet-roberta-base'

# download
model = AutoModel.from_pretrained(model_name)
config = AutoConfig.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# save
model.save_pretrained(save_dir)
config.save_pretrained(save_dir)
tokenizer.save_pretrained(save_dir)

# reload
model = AutoModel.from_pretrained(save_dir)
config = AutoConfig.from_pretrained(save_dir)
tokenizer = AutoTokenizer.from_pretrained(save_dir)
To force fully offline behaviour, set the offline environment variables before importing the libraries:

import os
os.environ['TRANSFORMERS_OFFLINE'] = '1'  # models
os.environ['HF_DATASETS_OFFLINE'] = '1'   # datasets

The same flags can be set on the command line when launching a script, for example the translation example from the transformers repository:

$ HF_DATASETS_OFFLINE=1 TRANSFORMERS_OFFLINE=1 python examples/pytorch/translation/run_translation.py --model_name_or_path t5-small --dataset_name wmt16 --dataset_config ro-en ...
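For datasets, a download-then-reload workflow similar to the model case also works. A minimal sketch, assuming the datasets library and the wmt16 ro-en config used in the command above; the local path is a placeholder:

from datasets import load_dataset, load_from_disk

# With network access: download once and keep a local copy.
dataset = load_dataset("wmt16", "ro-en")
dataset.save_to_disk("./your/path/wmt16_ro_en")

# Offline: reload the dataset from the local copy.
dataset = load_from_disk("./your/path/wmt16_ro_en")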