diff --git a/README.md b/README.md
index c6fc8fb..5c22f27 100644
--- a/README.md
+++ b/README.md
@@ -42,8 +42,8 @@ pip install --upgrade autoawq autoawq-kernels
 from awq import AutoAWQForCausalLM
 from transformers import AutoTokenizer, TextStreamer
 
-model_path = "solidrust/Lexi-Llama-3-8B-Uncensored-AWQ"
-system_message = "You are Lexi-Llama-3-8B-Uncensored, incarnated as a powerful AI. You were created by Orenguteng."
+model_path = "solidrust/Llama-3-8B-Lexi-Uncensored-AWQ"
+system_message = "You are Llama-3-8B-Lexi-Uncensored, incarnated as a powerful AI. You were created by Orenguteng."
 
 # Load model
 model = AutoAWQForCausalLM.from_quantized(model_path,