{
  "noesis_version": "v14.1",
  "framework": "DHCF-FNO",
  "founder": "Ilia Bolotnikov",
  "organization": "AMAImedia.com",
  "source_model": "nvidia/Nemotron-Orchestrator-8B",
  "source_format": "FP32 safetensors",
  "source_license": "NVIDIA Open Model License (research and development only)",
  "base_model": "Qwen/Qwen3-8B",
  "architecture": "Qwen3ForCausalLM (dense decoder-only, NO MoE)",
  "vocab_size": 151936,
  "language": "en",
  "quantization": {
    "method": "AWQ",
    "library": "autoawq 0.2.9",
    "zero_point": true,
    "q_group_size": 128,
    "w_bit": 4,
    "version": "GEMM"
  },
  "calibration": {
    "samples": 128,
    "max_seq_len": 512,
    "domain": "orchestration_tool_calling_english",
    "source": "in-memory synthetic prompts (NOESIS internal)"
  },
  "specialist_role": "M9-ORCH (English orchestration teacher)",
  "kd_weight_proposed": 0.22,
  "rng_seed": 1729,
  "torch_dtype_quant": "bfloat16"
}