# Flair_NER/train.py

learning_rate = 0.65e-4 # 0.65e-4 - 0.4e-4
mini_batch_size = 8
max_epochs = 100
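# The hyperparameters above are kept at module level so that main_train() can both
# pass them to trainer.fine_tune() and record them in the saved result file.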
from funcs import save_to_file_by_address
import json
import os
import datetime
from pathlib import Path
from flair.data import Corpus
from flair.datasets import ColumnCorpus
from flair.embeddings import TransformerWordEmbeddings
from flair.models import SequenceTagger
from flair.trainers import ModelTrainer
from flair.embeddings import TransformerDocumentEmbeddings
# model = os.getcwd() + "\\data\\final-model.pt"
# model = os.getcwd() + "/data/HooshvareLab--distilbert-fa-zwnj-base-ner"  # the first model tested; reached roughly 70% in the last training run
# model = os.getcwd() + "/data/distilbert-base-multilingual-cased-tavasi"
# model = "HooshvareLab/bert-fa-base-uncased-ner-peyma"
# model = "PooryaPiroozfar/Flair-Persian-NER"
## ---------------------------------------------------------
## --- the model most recently deployed in the Qanun-Yar system was trained from this checkpoint
# model = "orgcatorg/xlm-v-base-ner"  # best Persian tokenizer
## ---------------------------------------------------------
# model = AutoModel.from_pretrained("/home/gpu/HFHOME/hub/models--orgcatorg--xlm-v-base-ner")
# model = "pourmand1376/NER_Farsi"
# model = "HooshvareLab/bert-base-parsbert-ner-uncased"  # performed well
# model = "SeyedAli/Persian-Text-NER-Bert-V1"  # performed very well
# model = "HooshvareLab/bert-base-parsbert-peymaner-uncased"  # not impressive
# model = "HooshvareLab/bert-base-parsbert-armanner-uncased"  # not impressive


def digit_correct(input_num):
    """Zero-pad a number below 10 to two digits (used for timestamp formatting)."""
    if input_num < 10:
        return f'0{input_num}'
    return str(input_num)


def main_train(model):
    """
    Train a model for the NER task.

    :param model: name of the model to be fine-tuned
    """
    time = datetime.datetime.now()
    model_title = f"{time.year}-{digit_correct(time.month)}-{digit_correct(time.day)}--{digit_correct(time.hour)}-{digit_correct(time.minute)}-{digit_correct(time.second)}--{model}".replace('/', '--')
    print(f'\nMODEL:: {model}\n')
    # !pip install 'flair==0.10'
    # define columns
    columns = {0: 'text', 1: 'ner'}
    # directory where the data resides
    data_folder = './data/'
    # initializing the corpus; an earlier variant was:
    # corpus = ColumnCorpus(data_folder, columns, train_file='peyma_train.txt', sequence_length=512)
    # set the dataset file name here
    corpus = ColumnCorpus(data_folder, columns,
                          # train_file='peyma_train.txt')
                          train_file='DATASET140402_no_aref2.txt',  # qavanin 36K tokens
                          # test_file='test_ds_new.txt',  # test 110 sections - 6.7K
                          # dev_file='dev split 2.txt'
                          # max_sentence_length=500
                          )
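    # The training file is read in Flair's ColumnCorpus (CoNLL-style) format:
    # one token per line, the token in column 0 and its NER tag in column 1,
    # with a blank line marking a sentence boundary. Illustrative sketch only
    # (the tag names are hypothetical; the real scheme depends on the dataset):
    #
    #   تهران   B-LOC
    #   بزرگ    O
    #   است     O
    #
    #   <blank line = end of sentence>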
    # tag to predict
    tag_type = 'ner'
    # make tag dictionary from the corpus
    tag_dictionary = corpus.make_label_dictionary(label_type=tag_type)
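    # make_label_dictionary() collects every tag that appears in the 'ner' column,
    # so the label set the tagger learns comes directly from the training data.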
    # xlm-roberta-large
    # embeddings = TransformerWordEmbeddings(model='HooshvareLab/distilbert-fa-zwnj-base-ner',
    embeddings = TransformerWordEmbeddings(model=model,
                                           layers="-1",
                                           subtoken_pooling="first",
                                           # pooling='mean',
                                           fine_tune=True,
                                           use_context=True,
                                           from_tf=True,
                                           allow_long_sentences=True,
                                           # model_max_length=512,
                                           )
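    # Notes on the embedding flags above (as documented for Flair's TransformerWordEmbeddings):
    # - fine_tune=True lets the transformer weights be updated during training.
    # - use_context=True enables FLERT-style document-level context around each sentence.
    # - allow_long_sentences=True splits sentences longer than the model window into
    #   overlapping chunks instead of truncating them.
    # - from_tf=True asks Hugging Face to load TensorFlow weights; it is only needed
    #   when the checkpoint ships without PyTorch weights.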
    print('model read successfully!')
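    # With use_crf=False, use_rnn=False and reproject_embeddings=False, the
    # SequenceTagger below is essentially a linear tag classifier on top of the
    # fine-tuned transformer embeddings, the setup used in Flair's transformer
    # fine-tuning (FLERT) examples.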
    try:
        tagger = SequenceTagger(hidden_size=256,
                                embeddings=embeddings,
                                tag_dictionary=tag_dictionary,
                                tag_type='ner',
                                use_crf=False,
                                use_rnn=False,
                                reproject_embeddings=False
                                )
    except Exception as e:
        print(str(e.args[0]))
        return
    try:
        trainer = ModelTrainer(tagger, corpus)
        # resources/taggers/sota-ner-flert
        # trainer.fine_tune('./taggers',
        #                   learning_rate=2.0e-6,
        #                   mini_batch_size=16,
        #                   # mini_batch_chunk_size=1,  # remove this parameter to speed up computation if you have a big GPU
        #                   max_epochs=20
        #                   )
    except Exception as e:
        print(str(e.args[0]))
        return
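    # fine_tune() below runs the training loop with transformer-oriented defaults
    # (in recent Flair versions: AdamW and a linear warmup learning-rate schedule)
    # and writes its artifacts (final-model.pt, training.log, loss.tsv) into
    # ./taggers/{model_title}; its return value is recorded in the result file.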
    try:
        result = trainer.fine_tune(f"./taggers/{model_title}",
                                   learning_rate=learning_rate,
                                   mini_batch_size=mini_batch_size,
                                   max_epochs=max_epochs
                                   )
    except Exception as e:
        print(str(e.args[0]))
        return
    try:
        from train_log_plotter import plot_diagram
        plot_diagram(model_title)
    except Exception:
        print('log diagram failed due to error!')
    print('fine-tune operation finished')
    operation_time = datetime.datetime.now()
    print(f'operation_time: {operation_time}')
    # ###################################################
    # test the trained model on a predefined input
    print(' Try to test trained model! ')
    try:
        from inference import inference_main
        inference_main(f"./taggers/{model_title}", '')
    except Exception:
        print(' Testing model Error! ')
    print(' Testing model finished! ')
    # ###################################################
    # evaluate the trained model
    try:
        from evaluate_model import do_evaluate
        print(' Try to evaluate the trained model! ')
        evaluate_result = do_evaluate(f"./taggers/{model_title}/final-model.pt")
        print(' Evaluating finished! ')
    except Exception as e:
        print('do_evaluate function failed')
        evaluate_result = f"do_evaluate function failed!\nerror message:\n{str(e.args[0])}"
    train_result = f'''************************************************\n
##### TRAIN RESULT #####
F1 Score: {result}
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n'''
    hyperparameters = f"learning_rate: {learning_rate} - mini_batch_size: {mini_batch_size} - max_epochs: {max_epochs}"
    final_result = f"""Model Name: {model}
Fine-Tune Parameters: {hyperparameters}
{train_result}
{evaluate_result}\n
Fine_Tune time: {operation_time}
------------------------------------------------------------------------------------
------------------------------------------------------------------------------------\n
"""
    save_to_file_by_address(f'./taggers/{model_title}/test-result.txt', final_result)
    return True

models = """
HooshvareLab/bert-base-parsbert-ner-uncased
HooshvareLab/bert-fa-base-uncased-ner-peyma
HooshvareLab/bert-base-parsbert-armanner-uncased
HooshvareLab/bert-fa-base-uncased-ner-arman
HooshvareLab/bert-base-parsbert-peymaner-uncased
"""
models = """
HooshvareLab/bert-fa-base-uncased-ner-peyma
"""
# HooshvareLab/distilbert-fa-zwnj-base-ner
models_with_error = """
nicolauduran45/affilgood-ner-multilingual-v2 - error
Amirmerfan/bert-base-uncased-persian-ner-50k-base - error
AliFartout/Roberta-fa-en-ner - error
"""
model = 'HooshvareLab/bert-fa-base-uncased-ner-peyma'

if __name__ == "__main__":
    # model = 'HooshvareLab/bert-fa-base-uncased-ner-peyma'
    # main_train(model)
    # iterate over the model list and train each one
    for model in models.split('\n'):
        if model == '':
            continue
        print(f" ... try to TRAIN ** {model} ** Model ... ")
        try:
            result = main_train(model)
            if result:
                print(f'TRAIN **{model}** Finished successfully')
        except Exception:
            print(f" !!! TRAIN **{model}** Model ERROR !!! ")
    print('All Models Training Process Finished!')