# Transformers version: 4.38.2
import torch
from transformers import AutoConfig, AutoModel, AutoModelForSequenceClassification

def convert_model(model_path, new_model_name):
    # Path to the Hugging Face model
    huggingface_model_path = model_path

    # Path where the converted PyTorch model will be saved
    pytorch_model_path = "./data/" + new_model_name + ".pt"

    # Load the model from the Hugging Face format
    ##config = AutoConfig.from_pretrained(huggingface_model_path)
    ##model = AutoModel.from_pretrained(huggingface_model_path, config=config)

    # Load the config and the model
    config = AutoConfig.from_pretrained(huggingface_model_path)
    model = AutoModelForSequenceClassification.from_pretrained(huggingface_model_path, config=config)

    # Save the model in PyTorch format
    # model.save_pretrained(pytorch_model_path, output_format="torch")
    # save_pretrained(model, pytorch_model_path)
    torch.save(model, pytorch_model_path)
    # config.save_pretrained("./data/")
    print('Model saved!')
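
# Optional sanity check (a sketch, not part of the conversion flow above): torch.save(model, ...)
# pickles the whole nn.Module, so reloading needs a compatible torch/transformers environment,
# and on PyTorch >= 2.6 torch.load() may additionally need weights_only=False.
def check_saved_model(pt_path):
    loaded = torch.load(pt_path, map_location="cpu")
    print("Reloaded", type(loaded).__name__, "from", pt_path)
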
path = "G:\\Projects\\NLP\\Flair_NER\\models backup\\models--xlm-roberta-base\\snapshots\\e73636d4f797dec63c3081bb6ed5c7b0bb3f2089"
|
|
# path = "G:/Projects/NLP\Flair_NER/data/xlm-roberta-base"
|
|
|
|
# from transformers import XLMRobertaForSequenceClassification
# # Load the model
# model = XLMRobertaForSequenceClassification.from_pretrained(path, use_crf=True)
# # Save the model in PyTorch format
# # model.save_pytorch("./data/xlm-roberta-base.pt")  #, output_format="torch"
# torch.save(model.state_dict(), "./data/xlm-roberta-base.pt")

# from transformers import AutoModel, AutoTokenizer, AutoConfig, T5Config
# loaded_model_path = "HooshvareLab-bert-fa-base-uncased-finetuned-2"
# loaded_model_path_out = "output/HooshvareLab-bert-fa-base-uncased-finetuned-2-pt"
# config = AutoConfig.from_pretrained(loaded_model_path)
# auto_model = AutoModel.from_pretrained(loaded_model_path, config=config, from_tf=True)
# auto_model.save_pretrained(loaded_model_path_out)

############################################################################
loaded_model_path = "./Models"
loaded_model_path_out = "./data/bert-base-multilingual-cased.pt"

from transformers import AutoModel, AutoTokenizer, AutoConfig, T5Config

config = AutoConfig.from_pretrained(loaded_model_path)
auto_model = AutoModel.from_pretrained(loaded_model_path, config=config)  # from_tf=True
# Note: save_pretrained() writes a checkpoint directory (config + weights),
# so the ".pt" path above will end up naming a folder, not a single file.
auto_model.save_pretrained(loaded_model_path_out)  #, output_format="torch"
############################################################################
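
# A quick reload check (sketch, assuming the save above succeeded): the directory
# written by save_pretrained() loads back like any Hugging Face checkpoint.
# reloaded = AutoModel.from_pretrained(loaded_model_path_out)
# print(type(reloaded).__name__, "reloaded from", loaded_model_path_out)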

# # Path to the model file with the .safetensors extension
# safetensors_model_path = "G:\\Projects\\NLP\\Flair_NER\\models backup\\models--xlm-roberta-base\\snapshots\\e73636d4f797dec63c3081bb6ed5c7b0bb3f2089/model.safetensors"

# # Load the safetensors model
# # (note: torch.load cannot read the .safetensors format directly)
# safetensors_model = torch.load(safetensors_model_path, map_location='cpu')

# # Save the model as PyTorch with the .pt extension
# torch.save(safetensors_model, "./data/model__1.pt")

############################################################################
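# A working alternative for .safetensors weights (sketch, assuming the `safetensors`
# package is installed): load_file() returns a plain state_dict that torch.save() accepts.
# from safetensors.torch import load_file
# state_dict = load_file(safetensors_model_path)
# torch.save(state_dict, "./data/model_from_safetensors.pt")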

# from torch.jit import load

# # Load the SafeTensors model
# # model = load("./Models")
# model = torch.load("./data/Models")

# # Convert the model to PT
# # (note: nn.Module has no to_pt() method, so this attempt does not work as written)
# model_pt = model.to_pt()

# # Save the PT model
# torch.save(model_pt, "model000001.pt")
############################################################################

convert_model("./Models","xlm-roberta-base") |