from datasets import load_dataset
from transformers import AutoTokenizer

# TextDataset does not accept a tokenize function, so the datasets library's load_dataset + map is used instead.
# The checkpoint name below is only a placeholder.
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

def tokenize(batch):
    return tokenizer(batch['text'], padding=True, truncation=True)

# Load the raw text files and tokenize them in batches.
dataset = load_dataset("text", data_files={"train": "path/to/train_data", "validation": "path/to/val_data"})
train_dataset = dataset["train"].map(tokenize, batched=True)
val_dataset = dataset["validation"].map(tokenize, batched=True)
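As a minimal sketch of how the prepared datasets could then be consumed, the lines below pass them to a Trainer, assuming a masked language modeling objective on the tokenized text; the model checkpoint, collator settings, and TrainingArguments values are illustrative assumptions, not taken from the original.

from transformers import (AutoModelForMaskedLM, DataCollatorForLanguageModeling,
                          Trainer, TrainingArguments)

# Assumed setup: a BERT-style masked LM fine-tuned on the datasets prepared above.
model = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")
collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm_probability=0.15)
args = TrainingArguments(output_dir="out", num_train_epochs=1, per_device_train_batch_size=8)
trainer = Trainer(model=model, args=args, data_collator=collator,
                  train_dataset=train_dataset, eval_dataset=val_dataset)
trainer.train()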