Delete adapter_config.json
committed by huggingface-web
parent e4aa635c18
commit 89870f23d1
@@ -1,23 +0,0 @@
-{
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "teknium/CollectiveCognition-v1.1-Mistral-7B",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 32,
-  "lora_dropout": 0.1,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 16,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "q_proj",
-    "v_proj"
-  ],
-  "task_type": "CAUSAL_LM"
-}
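For context, the deleted file is a standard PEFT LoRA adapter config. A minimal sketch of how an equivalent file could be produced with the peft library (assuming peft is installed; auto-managed fields such as "alpha_pattern", "rank_pattern", and "revision" are filled in by recent peft releases when saving):

# Minimal sketch, not the repo author's actual training script.
# Reproduces the deleted adapter_config.json shown in the diff above.
from peft import LoraConfig

config = LoraConfig(
    base_model_name_or_path="teknium/CollectiveCognition-v1.1-Mistral-7B",
    r=16,                                 # LoRA rank
    lora_alpha=32,                        # scaling factor
    lora_dropout=0.1,
    bias="none",
    target_modules=["q_proj", "v_proj"],  # attention query/value projections
    task_type="CAUSAL_LM",
)

# Writes adapter_config.json into the given directory ("adapter" is a
# hypothetical output path chosen for this example).
config.save_pretrained("adapter")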