commit 21d6cc13372d7b8d33df81fc9ecb6185a5a438fd Author: ModelHub XC Date: Thu Apr 9 13:33:23 2026 +0800 初始化项目,由ModelHub XC社区提供模型 Model: RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf Source: Original Platform diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..9438790 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,54 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q2_K.gguf filter=lfs diff=lfs merge=lfs -text 
+gemma-2b-mt-German-to-English.Q3_K_S.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q3_K.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q3_K_M.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q3_K_L.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.IQ4_XS.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q4_0.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.IQ4_NL.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q4_K_S.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q4_K.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q4_1.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q5_0.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q5_K_S.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q5_K.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q5_1.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q6_K.gguf filter=lfs diff=lfs merge=lfs -text +gemma-2b-mt-German-to-English.Q8_0.gguf filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md new file mode 100644 index 0000000..7cdfde0 --- /dev/null +++ b/README.md @@ -0,0 +1,77 @@ +Quantization made by Richard Erkhov. 
+ +[Github](https://github.com/RichardErkhov) + +[Discord](https://discord.gg/pvy7H8DZMG) + +[Request more models](https://github.com/RichardErkhov/quant_request) + + +gemma-2b-mt-German-to-English - GGUF +- Model creator: https://huggingface.co/Samvardhan777/ +- Original model: https://huggingface.co/Samvardhan777/gemma-2b-mt-German-to-English/ + + +| Name | Quant method | Size | +| ---- | ---- | ---- | +| [gemma-2b-mt-German-to-English.Q2_K.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q2_K.gguf) | Q2_K | 1.08GB | +| [gemma-2b-mt-German-to-English.Q3_K_S.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q3_K_S.gguf) | Q3_K_S | 1.2GB | +| [gemma-2b-mt-German-to-English.Q3_K.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q3_K.gguf) | Q3_K | 1.29GB | +| [gemma-2b-mt-German-to-English.Q3_K_M.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q3_K_M.gguf) | Q3_K_M | 1.29GB | +| [gemma-2b-mt-German-to-English.Q3_K_L.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q3_K_L.gguf) | Q3_K_L | 1.36GB | +| [gemma-2b-mt-German-to-English.IQ4_XS.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.IQ4_XS.gguf) | IQ4_XS | 1.4GB | +| [gemma-2b-mt-German-to-English.Q4_0.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q4_0.gguf) | Q4_0 | 1.44GB | +| 
[gemma-2b-mt-German-to-English.IQ4_NL.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.IQ4_NL.gguf) | IQ4_NL | 1.45GB | +| [gemma-2b-mt-German-to-English.Q4_K_S.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q4_K_S.gguf) | Q4_K_S | 1.45GB | +| [gemma-2b-mt-German-to-English.Q4_K.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q4_K.gguf) | Q4_K | 1.52GB | +| [gemma-2b-mt-German-to-English.Q4_K_M.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q4_K_M.gguf) | Q4_K_M | 1.52GB | +| [gemma-2b-mt-German-to-English.Q4_1.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q4_1.gguf) | Q4_1 | 1.56GB | +| [gemma-2b-mt-German-to-English.Q5_0.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q5_0.gguf) | Q5_0 | 1.68GB | +| [gemma-2b-mt-German-to-English.Q5_K_S.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q5_K_S.gguf) | Q5_K_S | 1.68GB | +| [gemma-2b-mt-German-to-English.Q5_K.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q5_K.gguf) | Q5_K | 1.71GB | +| [gemma-2b-mt-German-to-English.Q5_K_M.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q5_K_M.gguf) | Q5_K_M | 1.71GB | +| 
[gemma-2b-mt-German-to-English.Q5_1.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q5_1.gguf) | Q5_1 | 1.79GB | +| [gemma-2b-mt-German-to-English.Q6_K.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q6_K.gguf) | Q6_K | 1.92GB | +| [gemma-2b-mt-German-to-English.Q8_0.gguf](https://huggingface.co/RichardErkhov/Samvardhan777_-_gemma-2b-mt-German-to-English-gguf/blob/main/gemma-2b-mt-German-to-English.Q8_0.gguf) | Q8_0 | 2.49GB | + + + + +Original model description: +--- +license: mit +language: +- de +- en +pipeline_tag: translation +tags: +- text-generation-inference +--- + + +# Description + +## Gemma 2B German to English v0.1 Alpha [Experimental Release] +This is a German instruction-finetuned version of Google's Gemma 2B model. This is an experiment to see if Gemma can translate German to English by expanding its vocabulary. While the responses may be rusty at times, it shows a lot of promise for a 2B parameter model. + + + + +--- +# Model description 🗄️: + Model type: A 2B parameter GPT-like model finetuned on 100,000 samples consisting of an equal proportion of English and German samples. + + Language(s): Bilingual. English and German. 
+ + License: Google Gemma Terms of Use + + Finetuned from model: Samvardhan777/gemma-2b-mt-German-to-English + + Training Precision: bfloat16 + + Training Hardware: Free Google Colab + + Dataset: kaitchup/opus-German-to-English + +--- + diff --git a/gemma-2b-mt-German-to-English.IQ4_NL.gguf b/gemma-2b-mt-German-to-English.IQ4_NL.gguf new file mode 100644 index 0000000..16c6da6 --- /dev/null +++ b/gemma-2b-mt-German-to-English.IQ4_NL.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:89ca4b99fbd1a6e92b86277bf9863855c04c198097057774fb8855be7bdc043b +size 1560758496 diff --git a/gemma-2b-mt-German-to-English.IQ4_XS.gguf b/gemma-2b-mt-German-to-English.IQ4_XS.gguf new file mode 100644 index 0000000..1cb10b9 --- /dev/null +++ b/gemma-2b-mt-German-to-English.IQ4_XS.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:353eaf334405a387328d5c8672b15a401cf9649595dd3f603c4d48836ffed06c +size 1501219040 diff --git a/gemma-2b-mt-German-to-English.Q2_K.gguf b/gemma-2b-mt-German-to-English.Q2_K.gguf new file mode 100644 index 0000000..e466397 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q2_K.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a1bedd8de3057cefda838cdef7f744b4b37cac39b99f712586cddbb273f8321 +size 1157925088 diff --git a/gemma-2b-mt-German-to-English.Q3_K.gguf b/gemma-2b-mt-German-to-English.Q3_K.gguf new file mode 100644 index 0000000..a8b2f00 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q3_K.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e56ae14c587043f741ed759863fa4aa2676ee40571ad2d4170069f324123d032 +size 1383803104 diff --git a/gemma-2b-mt-German-to-English.Q3_K_L.gguf b/gemma-2b-mt-German-to-English.Q3_K_L.gguf new file mode 100644 index 0000000..794d498 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q3_K_L.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:931d4db0e229993143d0fec1a340a4bfad18da8d7e8a5e05d53e2af2e4e4db0b +size 1465592032 diff --git a/gemma-2b-mt-German-to-English.Q3_K_M.gguf b/gemma-2b-mt-German-to-English.Q3_K_M.gguf new file mode 100644 index 0000000..a8b2f00 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q3_K_M.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e56ae14c587043f741ed759863fa4aa2676ee40571ad2d4170069f324123d032 +size 1383803104 diff --git a/gemma-2b-mt-German-to-English.Q3_K_S.gguf b/gemma-2b-mt-German-to-English.Q3_K_S.gguf new file mode 100644 index 0000000..e05426b --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q3_K_S.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a46bc9b5a516319d9b6e162c942e1af254369094109515211f43cbaa73c0ced +size 1287981280 diff --git a/gemma-2b-mt-German-to-English.Q4_0.gguf b/gemma-2b-mt-German-to-English.Q4_0.gguf new file mode 100644 index 0000000..ed77b52 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q4_0.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a09b83b3fe0ef136fb65fa3670c4cb2d416bef9786dbe54b69f2383cc6f40272 +size 1551190240 diff --git a/gemma-2b-mt-German-to-English.Q4_1.gguf b/gemma-2b-mt-German-to-English.Q4_1.gguf new file mode 100644 index 0000000..ec9e863 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q4_1.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a9a5df15645b0b863c5f3e834abea043fd8c6eabe9ada0ec8a7a8045778c027 +size 1675053280 diff --git a/gemma-2b-mt-German-to-English.Q4_K.gguf b/gemma-2b-mt-German-to-English.Q4_K.gguf new file mode 100644 index 0000000..073c3df --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q4_K.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c507abfc3abf33eaa88ae51670b0c3da3feebb44f70cf585dd04fb0640d63fb0 +size 1630263520 diff --git a/gemma-2b-mt-German-to-English.Q4_K_M.gguf b/gemma-2b-mt-German-to-English.Q4_K_M.gguf new file mode 100644 index 
0000000..073c3df --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q4_K_M.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c507abfc3abf33eaa88ae51670b0c3da3feebb44f70cf585dd04fb0640d63fb0 +size 1630263520 diff --git a/gemma-2b-mt-German-to-English.Q4_K_S.gguf b/gemma-2b-mt-German-to-English.Q4_K_S.gguf new file mode 100644 index 0000000..ab00652 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q4_K_S.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7a591d2024b6684d9542c7b3dd65b3476c62de06bfa78a739d30ee0d030b2529 +size 1559840992 diff --git a/gemma-2b-mt-German-to-English.Q5_0.gguf b/gemma-2b-mt-German-to-English.Q5_0.gguf new file mode 100644 index 0000000..4d58a7f --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q5_0.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:caae2b0c30765b17b55100b8761de8ca4ba6e49488149ccdfc78782d3f3bc509 +size 1798916320 diff --git a/gemma-2b-mt-German-to-English.Q5_1.gguf b/gemma-2b-mt-German-to-English.Q5_1.gguf new file mode 100644 index 0000000..49bafec --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q5_1.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9dfb9b8013f0c52b5ac4cfd079f3f3dc8f3149ab0fae179fa49986cae642873b +size 1922779360 diff --git a/gemma-2b-mt-German-to-English.Q5_K.gguf b/gemma-2b-mt-German-to-English.Q5_K.gguf new file mode 100644 index 0000000..c2140d6 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q5_K.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77f1be2b4fd28a42df57af5afd917574559aa141935e6e78cdaddcfc99482b1f +size 1839651040 diff --git a/gemma-2b-mt-German-to-English.Q5_K_M.gguf b/gemma-2b-mt-German-to-English.Q5_K_M.gguf new file mode 100644 index 0000000..c2140d6 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q5_K_M.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77f1be2b4fd28a42df57af5afd917574559aa141935e6e78cdaddcfc99482b1f +size 
1839651040 diff --git a/gemma-2b-mt-German-to-English.Q5_K_S.gguf b/gemma-2b-mt-German-to-English.Q5_K_S.gguf new file mode 100644 index 0000000..f5041b8 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q5_K_S.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ba1c988e2c3e30ef6031cc4a9259b748fc203202554d4e0187effb03a5868de +size 1798916320 diff --git a/gemma-2b-mt-German-to-English.Q6_K.gguf b/gemma-2b-mt-German-to-English.Q6_K.gguf new file mode 100644 index 0000000..67b55b5 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q6_K.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64c35839f0bea6ac0337719a86dd798e0076689377f78cc63070fd9318b143bb +size 2062125280 diff --git a/gemma-2b-mt-German-to-English.Q8_0.gguf b/gemma-2b-mt-German-to-English.Q8_0.gguf new file mode 100644 index 0000000..bed9966 --- /dev/null +++ b/gemma-2b-mt-German-to-English.Q8_0.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9935a3ba8cd98be821a658dc045bcbc079477a855bfbf779de8e6e872e61d47c +size 2669070560