diff --git a/config.json b/config.json
index 2a8e523..b9ce089 100644
--- a/config.json
+++ b/config.json
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "checkpoints/Llama-2-7b-hf",
+  "_name_or_path": "wenge-research/yayi-7b-llama2",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "rope_scaling": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.31.0",
   "use_cache": false,
   "vocab_size": 32005
 }
diff --git a/pytorch_model.bin.index.json b/pytorch_model.bin.index.json
index 4c15a63..9cd5843 100644
--- a/pytorch_model.bin.index.json
+++ b/pytorch_model.bin.index.json
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 13476917248
+    "total_size": 13476921344
   },
   "weight_map": {
     "lm_head.weight": "pytorch_model-00002-of-00002.bin",