import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
cache_dir = "./models"
model_id = "NousResearch/Meta-Llama-3.1-8B"
tokenizer = AutoTokenizer.from_pretrained(model_id, cache_dir=cache_dir)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    cache_dir=cache_dir,
    torch_dtype=torch.float32,  # use float32 for CPU inference
    device_map=None,  # explicitly state that the model will not be mapped to a GPU
)
model.to("cpu")