test_models_classification/bloom.py

41 lines
927 B
Python

from transformers import AutoModelForCausalLM, AutoTokenizer, set_seed
import torch
import random
import datetime
def printNow():
    """Print the current local date and time (used to timestamp generation runs)."""
    print(datetime.datetime.now())
def answer(prompt):
    """Generate and print a completion for *prompt* with the module-level model.

    Re-seeds the generator with a fresh random seed on every call so repeated
    prompts produce varied outputs, and prints timestamps before and after
    generation so the run duration is visible in the console.

    Args:
        prompt: The user text to complete.
    """
    printNow()
    seed = random.randint(1, 99999)
    print(seed)  # log the seed so an interesting output can be reproduced
    set_seed(seed)
    # Move inputs to wherever the model actually lives instead of hard-coding
    # GPU 0: the original `.to(0)` crashes on CPU-only machines even though the
    # script deliberately falls back to CPU when CUDA is unavailable.
    input_ids = tokenizer(prompt, return_tensors="pt").to(model.device)
    sample = model.generate(**input_ids, max_length=100, repetition_penalty=2.0)
    print(tokenizer.decode(sample[0]))
    printNow()
print('loading...')
# Prefer GPU when available; make it the default device for new tensors.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
torch.set_default_device(device)
# use_cache reuses past key/values, speeding up autoregressive generation.
model = AutoModelForCausalLM.from_pretrained("bigscience/bloom-560m", use_cache=True)
tokenizer = AutoTokenizer.from_pretrained("bigscience/bloom-560m")
# Simple REPL: type 'exit' to quit. Check the sentinel BEFORE answering so we
# don't waste a full generation pass on the literal word "exit" (the original
# loop generated an answer for it before terminating).
while True:
    sentence = input("oui ?")
    if sentence == 'exit':
        break
    answer(sentence)