diff --git a/config.json b/config.json
new file mode 100644
index 0000000..b54f5dc
--- /dev/null
+++ b/config.json
@@ -0,0 +1,23 @@
+{
+ "_name_or_path": "MiniMA-3B",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 4096,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 24,
+ "pad_token_id": 0,
+ "rms_norm_eps": 1e-05,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.30.2",
+ "use_cache": true,
+ "vocab_size": 49216
+}
diff --git a/pytorch_model.bin b/pytorch_model.bin
new file mode 100644
index 0000000..cf0c08c
--- /dev/null
+++ b/pytorch_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e64e665a7d3fb59f34fe7c9bfe6e0e5ad69413b803b7ac6db21c60acdfde234
+size 6040967525
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000..d85ba6c
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.model b/tokenizer.model
new file mode 100644
index 0000000..14cb1e6
--- /dev/null
+++ b/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ae87c0db2b21b0fa3fdc5e19d1f9cea94efb703cc7c6281d8718a6714b3cc2be
+size 748869
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000..64f06a6
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,34 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "bos_token": {
+ "__type": "AddedToken",
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "clean_up_tokenization_spaces": false,
+ "eos_token": {
+ "__type": "AddedToken",
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": null,
+ "sp_model_kwargs": {},
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": {
+ "__type": "AddedToken",
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "use_fast": true
+}