From 243b671a44411a839c0d8f9793b6f9bd3e628d16 Mon Sep 17 00:00:00 2001
From: Severian
Date: Sun, 8 Oct 2023 18:44:43 +0000
Subject: [PATCH] Upload adapter_config.json

---
 adapter_config.json | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)
 create mode 100644 adapter_config.json

diff --git a/adapter_config.json b/adapter_config.json
new file mode 100644
index 0000000..216fb4a
--- /dev/null
+++ b/adapter_config.json
@@ -0,0 +1,23 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "teknium/CollectiveCognition-v1.1-Mistral-7B",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.1,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 16,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
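
For reference, the file added by this patch is standard PEFT LoRA adapter metadata: it records that the adapter was trained on top of teknium/CollectiveCognition-v1.1-Mistral-7B with rank r=16, lora_alpha=32, dropout 0.1, targeting the q_proj and v_proj attention projections for causal language modeling. Below is a minimal, non-authoritative sketch of how a config like this is typically consumed with the Hugging Face PEFT library; the adapter directory path "./adapter" is a hypothetical placeholder for wherever this adapter_config.json and its accompanying weights live.

# Sketch: loading the adapter described by the config above.
# Assumes transformers and peft are installed; "./adapter" is a
# hypothetical local directory containing adapter_config.json plus
# the adapter weights (which are not part of this patch).
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "teknium/CollectiveCognition-v1.1-Mistral-7B"  # base_model_name_or_path

# Load the frozen base model the LoRA adapter was trained against.
base_model = AutoModelForCausalLM.from_pretrained(base_id)
tokenizer = AutoTokenizer.from_pretrained(base_id)

# Attach the low-rank adapter; PEFT reads adapter_config.json to
# rebuild the LoRA layers (r=16, alpha=32, q_proj/v_proj targets).
model = PeftModel.from_pretrained(base_model, "./adapter")
model.eval()  # matches "inference_mode": true

Targeting only q_proj and v_proj at r=16 keeps the trainable parameter count tiny relative to the 7B base model, the usual trade-off this style of LoRA config makes; "modules_to_save": null means no additional modules are unfrozen beyond the adapter itself.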