Files
frankencria-llama2-12.5b-v1…/model.safetensors.index.json

1 line · 42 KiB · JSON

{"metadata": {"mergekit_version": "0.0.4.1"}, "weight_map": {"model.layers.47.mlp.down_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00001-of-00013.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00001-of-00013.safetensors", "model.layers.47.input_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.39.input_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00001-of-00013.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00001-of-00013.safetensors", "model.layers.46.input_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.38.input_layernorm.weight": "model-00001-of-00013.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00001-of-00013.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00001-of-00013.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00001-of-00013.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00001-of-00013.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00002-of-00013.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00002-of-00013.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00002-of-00013.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00002-of-00013.safetensors", 
"model.layers.45.self_attn.o_proj.weight": "model-00002-of-00013.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00002-of-00013.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00002-of-00013.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00002-of-00013.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00002-of-00013.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00002-of-00013.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00002-of-00013.safetensors", "model.layers.37.self_attn.