@@ -2,7 +2,7 @@ import torch
 import logging
 from transformers import BitsAndBytesConfig
-from transformers import AutoModelForCausalLM, AutoTokenizer #, BitsAndBytesConfig
+from transformers import AutoModelForCausalLM, AutoTokenizer


 class HuggingFaceLLM:
     def __init__(self, model_id: str, device: str = None, max_length: int = 20, quantize: bool = False, quantization_config: dict = None):
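The hunk above only touches the imports and the constructor signature, so for context here is a hedged sketch of how the new `quantize` / `quantization_config` parameters might feed a `BitsAndBytesConfig` when the model is loaded. Everything below the signature is an assumption for illustration, not code taken from this change:

```python
# Sketch only: the diff shows the signature, not the body, so this constructor
# body is an assumed wiring of quantize / quantization_config, not the PR's code.
import logging
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

logger = logging.getLogger(__name__)


class HuggingFaceLLM:
    def __init__(self, model_id: str, device: str = None, max_length: int = 20,
                 quantize: bool = False, quantization_config: dict = None):
        self.device = device or ("cuda" if torch.cuda.is_available() else "cpu")
        self.max_length = max_length

        bnb_config = None
        if quantize:
            # Use caller-supplied settings if given, otherwise fall back to 4-bit defaults.
            settings = quantization_config or {"load_in_4bit": True,
                                               "bnb_4bit_compute_dtype": torch.float16}
            bnb_config = BitsAndBytesConfig(**settings)
            logger.info("Loading %s with bitsandbytes quantization", model_id)

        self.tokenizer = AutoTokenizer.from_pretrained(model_id)
        self.model = AutoModelForCausalLM.from_pretrained(
            model_id,
            quantization_config=bnb_config,           # None disables quantization
            device_map="auto" if quantize else None,  # let accelerate place quantized weights
        )
        if not quantize:
            self.model.to(self.device)
```

Passing `device_map="auto"` only on the quantized path follows the usual bitsandbytes pattern, where device placement is handled during loading rather than by an explicit `.to(device)` afterwards.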