Files
Qwen3-32B-T-pro-it-2.1-NOESIS-AWQ-INT4/noesis_provenance.json
ModelHub XC 9666f81410 初始化项目,由ModelHub XC社区提供模型
Model: AMAImedia/Qwen3-32B-T-pro-it-2.1-NOESIS-AWQ-INT4
Source: Original Platform
2026-05-01 23:24:18 +08:00

41 lines
1.1 KiB
JSON

{
"noesis_version": "v14.1",
"framework": "DHCF-FNO",
"founder": "Ilia Bolotnikov",
"organization": "AMAImedia.com",
"source_model": "t-tech/T-pro-it-2.1",
"source_format": "BF16 safetensors",
"source_license": "Apache-2.0",
"base_model": "Qwen/Qwen3-32B",
"architecture": "Qwen3ForCausalLM (DENSE decoder-only, NOT MoE; 3 GRPO experts SLERP-merged offline into single weights)",
"vocab_size": 151936,
"languages": [
"ru",
"en",
"uk",
"be"
],
"quantization": {
"method": "AWQ",
"library": "autoawq 0.2.9",
"zero_point": true,
"q_group_size": 128,
"w_bit": 4,
"version": "GEMM"
},
"calibration": {
"samples": 128,
"max_seq_len": 512,
"domain_mix": {
"ru": 0.7,
"en": 0.2,
"code": 0.1
},
"source": "in-memory synthetic prompts (NOESIS internal)"
},
"specialist_role": "M2-DUB-LM (RU), M4-CHAT (RU), M9-ORCH (RU)",
"kd_weight_proposed": 0.18,
"kd_shard": "russian_only",
"rng_seed": 1729,
"torch_dtype_quant": "bfloat16"
}