Upload gemma-2-9b-it-Q3_K_XL.gguf with huggingface_hub

This commit is contained in:
ai-modelscope
2024-07-27 01:39:24 +08:00
parent 09610af741
commit 6576821a94
94 changed files with 207 additions and 79 deletions

7
.gitattributes vendored
View File

@@ -56,3 +56,10 @@ gemma-2-9b-it-Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q8_0_L.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-f32.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it.imatrix filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q2_K_L.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q3_K_XL.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q6_K-Q8.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q6_K-f32.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q3_K_L-Q8.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q4_K_M-fp16.gguf filter=lfs diff=lfs merge=lfs -text
gemma-2-9b-it-Q8_0-f16.gguf filter=lfs diff=lfs merge=lfs -text

View File

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
cdc07b304b8c83da471390e12a15ca3a57aa0894
1722008559.6692302

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
e152459c333bc687d3829e551e9c9c3f51abc087ba9f87d82033847e48db3df4
1722008663.1916308

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
0725ca79e7415d9a23d0cdcd3f2376c1b989777dd07cd8c088af23bb4fc1fa28
1722008663.2204618

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
010fb98c74fd5b413008adb14f62a0a6886883936dbc5693f45fd389b54bfd40
1722008591.1106663

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
d117938c5ebd91f0cbe1962236d04b42a716b03ebf54560196773f51bd1b2acb
1722008663.217857

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
379fb78fa6453222a54d85ed04cb376663ccc838347412eccab587290c8df1b9
1722008663.261843

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
8a57501217fee57586ec916fe196cce8e0f98d9a493eb817f3f27ea75b4904c9
1722008663.2493024

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
c79966f200495368e8d1d4fecefbfdd0176327ec76968522c6fe13c397ad5d58
1722008663.2340443

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
2efce51c0d774a47c247c07ec6f109f670e3da4f4eec0f9322d72e3635622403
1722008663.1963668

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
0c0c08d499a7a491f50bf68c6b925a27134a98f6a1fbcfbdeec558297884ea72
1722008663.2521818

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
b8a45c72fee34ade31f7fcd2eec5e7eaafb51d7e1e80fe1762eff1a33e83d2fc
1722008778.1372104

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
38fbe0303ac54882c72fd28f636abf731e4bd813809c7a1cffc829980391bcb3
1722008776.062595

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
3b5cb881500b1f72d456784a70214c8e49b7b3f49dae4fe524ca2c19e650b18b
1722008776.73387

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
594a305f4806f982808a07e0271e6e9354e4557c2a1a72383005b41b200830d8
1722008763.8150647

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
6609cc2ebba6311806ee84116af44e7fb26762f116572a8181aad4a5da5c9e95
1722009026.903439

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
ed8573f0d1e3ca58e471498fd162a65d4f4253dffdec28a7ba394fdcae92f1f3
1722008778.7330132

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
0452e54ba9b6c5b2e466fbd7f28c089fefb7c3dceddcc2b75becab6b16cae523
1722008794.5926306

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
13b2a7b4115bbd0900162edcebe476da1ba1fc24e718e8b40d32f6e300f56dfe
1722008777.381287

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
61fd8af95257d1e39fa3bab7f4e4f7af65812f4fe017a808913f3c1c7151d33a
1722008849.6402676

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
fcff2482f87cf4ba5bf0dca01cbde26abc9c77aa0caac0a792636219ce827e75
1722008894.1712573

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
a4b0b55ce809a09baaefb789b0046ac77ecd502aba8aeb2ed63cc237d9f40ce7
1722008894.2503803

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
ca83ab1d6a8df1f2b82ba2889dfeae77b878386ba60b8f55bad8cd370e2cc738
1722008923.0811846

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
4e000b337787bd1cf2f07bbcf31709e6ff4fad7f383156741e674027d4038277
1722008916.3342438

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
8b36d8f25b92b38a3c193f8e382f3df13699db42f65b124948db74b1f0a6b21b
1722008958.2288108

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
70ea6fe1a4a8005ef6a0b379a87d271028cfa1b0be5fa2848e30419653d717a4
1722008913.8530598

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
497f1c006183cd186fb3503d90b80ba9a4518d8f6146a1f8e69615b20511c678
1722009023.2152562

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
5f9cfe355265a0f78c4942e1a6cc6b7113ddb75748f7f1267490abfb34b0abc4
1722009139.0869124

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
07ac9ec01217d7bf2976a3a0dbf564a91b6957f94ab2ad30671eb6100f64bc2b
1722009029.678579

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
e19cb326ba857bec47383ada7af3b0fd965605a44e0a3b6ab0b1e080a541597a
1722009120.7097569

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
ac2d53301d4e91f0c3a1835df213af9334eaccb08be76afd694ec2101b57bdaa
1722010976.5704563

View File

@@ -0,0 +1,3 @@
d731033f3dc4018261fd39896e50984d398b4ac5
40f30bcab948c839ffd9855c98ec76bffb7f5cae8d301d556ee8a96f5832058b
1722008927.2496314

View File

@@ -1,22 +1,21 @@
---
license: gemma
base_model: google/gemma-2-9b-it
library_name: transformers
license: gemma
pipeline_tag: text-generation
extra_gated_heading: Access Gemma on Hugging Face
extra_gated_prompt: >-
To access Gemma on Hugging Face, you're required to review and agree to
Google's usage license. To do this, please ensure you're logged in to Hugging
Face and click below. Requests are processed immediately.
extra_gated_button_content: Acknowledge license
tags:
- conversational
- gemma2
quantized_by: bartowski
extra_gated_heading: Access Gemma on Hugging Face
extra_gated_prompt: To access Gemma on Hugging Face, you're required to review and
agree to Google's usage license. To do this, please ensure you're logged in to Hugging
Face and click below. Requests are processed immediately.
extra_gated_button_content: Acknowledge license
---
## Llamacpp imatrix Quantizations of gemma-2-9b-it
Using <a href="https://github.com/ggerganov/llama.cpp/">llama.cpp</a> PR <a href="https://github.com/ggerganov/llama.cpp/pull/8156">8156</a> for quantization.
Using <a href="https://github.com/ggerganov/llama.cpp/">llama.cpp</a> release <a href="https://github.com/ggerganov/llama.cpp/releases/tag/b3389">b3389</a> for quantization.
Original model: https://huggingface.co/google/gemma-2-9b-it
@@ -25,9 +24,11 @@ All quants made using imatrix option with dataset from [here](https://gist.githu
## Prompt format
```
<bos><start_of_turn>user
<start_of_turn>user
{prompt}<end_of_turn>
<start_of_turn>model
<end_of_turn>
<start_of_turn>model
```
@@ -35,29 +36,34 @@ Note that this model does not support a System prompt.
## Download a file (not the whole branch) from below:
| Filename | Quant type | File Size | Description |
| -------- | ---------- | --------- | ----------- |
| [gemma-2-9b-it-Q8_0_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q8_1.gguf) | Q8_0_L | 10.68GB | *Experimental*, uses f16 for embed and output weights. Please provide any feedback of differences. Extremely high quality, generally unneeded but max available quant. |
| [gemma-2-9b-it-Q8_0.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q8_0.gguf) | Q8_0 | 9.82GB | Extremely high quality, generally unneeded but max available quant. |
| [gemma-2-9b-it-Q6_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q6_K_L.gguf) | Q6_K_L | 8.67GB | *Experimental*, uses f16 for embed and output weights. Please provide any feedback of differences. Very high quality, near perfect, *recommended*. |
| [gemma-2-9b-it-Q6_K.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q6_K.gguf) | Q6_K | 7.58GB | Very high quality, near perfect, *recommended*. |
| [gemma-2-9b-it-Q5_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q5_K_L.gguf) | Q5_K_L | 7.73GB | *Experimental*, uses f16 for embed and output weights. Please provide any feedback of differences. High quality, *recommended*. |
| [gemma-2-9b-it-Q5_K_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q5_K_M.gguf) | Q5_K_M | 6.64GB | High quality, *recommended*. |
| [gemma-2-9b-it-Q5_K_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q5_K_S.gguf) | Q5_K_S | 6.48GB | High quality, *recommended*. |
| [gemma-2-9b-it-Q4_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q4_K_L.gguf) | Q4_K_L | 6.84GB | *Experimental*, uses f16 for embed and output weights. Please provide any feedback of differences. Good quality, uses about 4.83 bits per weight, *recommended*. |
| [gemma-2-9b-it-Q4_K_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q4_K_M.gguf) | Q4_K_M | 5.76GB | Good quality, uses about 4.83 bits per weight, *recommended*. |
| [gemma-2-9b-it-Q4_K_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q4_K_S.gguf) | Q4_K_S | 5.47GB | Slightly lower quality with more space savings, *recommended*. |
| [gemma-2-9b-it-IQ4_XS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ4_XS.gguf) | IQ4_XS | 5.18GB | Decent quality, smaller than Q4_K_S with similar performance, *recommended*. |
| [gemma-2-9b-it-Q3_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q3_K_L.gguf) | Q3_K_L | 5.13GB | Lower quality but usable, good for low RAM availability. |
| [gemma-2-9b-it-Q3_K_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q3_K_M.gguf) | Q3_K_M | 4.76GB | Even lower quality. |
| [gemma-2-9b-it-IQ3_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ3_M.gguf) | IQ3_M | 4.49GB | Medium-low quality, new method with decent performance comparable to Q3_K_M. |
| [gemma-2-9b-it-Q3_K_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q3_K_S.gguf) | Q3_K_S | 4.33GB | Low quality, not recommended. |
| [gemma-2-9b-it-IQ3_XS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ3_XS.gguf) | IQ3_XS | 4.14GB | Lower quality, new method with decent performance, slightly better than Q3_K_S. |
| [gemma-2-9b-it-IQ3_XXS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ3_XXS.gguf) | IQ3_XXS | 3.79GB | Lower quality, new method with decent performance, comparable to Q3 quants. |
| [gemma-2-9b-it-Q2_K.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q2_K.gguf) | Q2_K | 3.80GB | Very low quality but surprisingly usable. |
| [gemma-2-9b-it-IQ2_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ2_M.gguf) | IQ2_M | 3.43GB | Very low quality, uses SOTA techniques to also be surprisingly usable. |
| [gemma-2-9b-it-IQ2_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ2_S.gguf) | IQ2_S | 3.21GB | Very low quality, uses SOTA techniques to be usable. |
| [gemma-2-9b-it-IQ2_XS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ2_XS.gguf) | IQ2_XS | 3.06GB | Very low quality, uses SOTA techniques to be usable. |
| Filename | Quant type | File Size | Split | Description |
| -------- | ---------- | --------- | ----- | ----------- |
| [gemma-2-9b-it-f32.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-f32.gguf) | f32 | 36.97GB | false | Full F32 weights. |
| [gemma-2-9b-it-Q8_0.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q8_0.gguf) | Q8_0 | 9.83GB | false | Extremely high quality, generally unneeded but max available quant. |
| [gemma-2-9b-it-Q6_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q6_K_L.gguf) | Q6_K_L | 7.81GB | false | Uses Q8_0 for embed and output weights. Very high quality, near perfect, *recommended*. |
| [gemma-2-9b-it-Q6_K.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q6_K.gguf) | Q6_K | 7.59GB | false | Very high quality, near perfect, *recommended*. |
| [gemma-2-9b-it-Q5_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q5_K_L.gguf) | Q5_K_L | 6.87GB | false | Uses Q8_0 for embed and output weights. High quality, *recommended*. |
| [gemma-2-9b-it-Q5_K_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q5_K_M.gguf) | Q5_K_M | 6.65GB | false | High quality, *recommended*. |
| [gemma-2-9b-it-Q5_K_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q5_K_S.gguf) | Q5_K_S | 6.48GB | false | High quality, *recommended*. |
| [gemma-2-9b-it-Q4_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q4_K_L.gguf) | Q4_K_L | 5.98GB | false | Uses Q8_0 for embed and output weights. Good quality, *recommended*. |
| [gemma-2-9b-it-Q4_K_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q4_K_M.gguf) | Q4_K_M | 5.76GB | false | Good quality, default size for most use cases, *recommended*. |
| [gemma-2-9b-it-Q4_K_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q4_K_S.gguf) | Q4_K_S | 5.48GB | false | Slightly lower quality with more space savings, *recommended*. |
| [gemma-2-9b-it-IQ4_XS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ4_XS.gguf) | IQ4_XS | 5.18GB | false | Decent quality, smaller than Q4_K_S with similar performance, *recommended*. |
| [gemma-2-9b-it-Q3_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q3_K_L.gguf) | Q3_K_L | 5.13GB | false | Lower quality but usable, good for low RAM availability. |
| [gemma-2-9b-it-Q3_K_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q3_K_M.gguf) | Q3_K_M | 4.76GB | false | Low quality. |
| [gemma-2-9b-it-IQ3_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ3_M.gguf) | IQ3_M | 4.49GB | false | Medium-low quality, new method with decent performance comparable to Q3_K_M. |
| [gemma-2-9b-it-Q3_K_S.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q3_K_S.gguf) | Q3_K_S | 4.34GB | false | Low quality, not recommended. |
| [gemma-2-9b-it-IQ3_XS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ3_XS.gguf) | IQ3_XS | 4.14GB | false | Lower quality, new method with decent performance, slightly better than Q3_K_S. |
| [gemma-2-9b-it-Q2_K_L.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q2_K_L.gguf) | Q2_K_L | 4.03GB | false | Uses Q8_0 for embed and output weights. Very low quality but surprisingly usable. |
| [gemma-2-9b-it-Q2_K.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-Q2_K.gguf) | Q2_K | 3.81GB | false | Very low quality but surprisingly usable. |
| [gemma-2-9b-it-IQ3_XXS.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ3_XXS.gguf) | IQ3_XXS | 3.80GB | false | Lower quality, new method with decent performance, comparable to Q3 quants. |
| [gemma-2-9b-it-IQ2_M.gguf](https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/blob/main/gemma-2-9b-it-IQ2_M.gguf) | IQ2_M | 3.43GB | false | Relatively low quality, uses SOTA techniques to be surprisingly usable. |
## Credits
Thank you kalomaze and Dampf for assistance in creating the imatrix calibration dataset
Thank you ZeroWw for the inspiration to experiment with embed/output
## Downloading using huggingface-cli
@@ -106,3 +112,4 @@ These I-quants can also be used on CPU and Apple Metal, but will be slower than
The I-quants are *not* compatible with Vulkan, which is also AMD, so if you have an AMD card double check if you're using the rocBLAS build or the Vulkan build. At the time of writing this, LM Studio has a preview with ROCm support, and other inference engines have specific builds for ROCm.
Want to support my work? Visit my ko-fi page here: https://ko-fi.com/bartowski

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:94b6fe40ee612eb50159ad3e6fc97cc04c88ec7af8a813246d8d8c27c4522a00
size 3434988384
oid sha256:e152459c333bc687d3829e551e9c9c3f51abc087ba9f87d82033847e48db3df4
size 3434668992

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ee80dfba6b4b7aee3528d742526d984496e621640d3c75eb521a36afed2c6010
size 3211805536
oid sha256:0725ca79e7415d9a23d0cdcd3f2376c1b989777dd07cd8c088af23bb4fc1fa28
size 3211487104

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4f3ae196ff0f2a34927db3ed93451575394fb47e0a843f5d7915f2a9da02e7fd
size 3067700064
oid sha256:010fb98c74fd5b413008adb14f62a0a6886883936dbc5693f45fd389b54bfd40
size 3067381632

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:744e3ead642fcbde768639cf2532f78da5b8ca80ce1718fed712c0ab38a5ebda
size 4494995808
oid sha256:d117938c5ebd91f0cbe1962236d04b42a716b03ebf54560196773f51bd1b2acb
size 4494615488

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:294638961d516cbc520c0dcd6a143fe0226f9969ee483e3e0d89b0f6f3587e4d
size 4145369440
oid sha256:379fb78fa6453222a54d85ed04cb376663ccc838347412eccab587290c8df1b9
size 4144989120

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:79dfb350e10a4c7598b957c00381b7a8459f64e99457d4d05203dc2528683824
size 3797058400
oid sha256:8a57501217fee57586ec916fe196cce8e0f98d9a493eb817f3f27ea75b4904c9
size 3796739008

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a8817cc2720f51f1130d994fcacd58b8860c88882d10bbffb29ed4b5f62f155b
size 5183410528
oid sha256:c79966f200495368e8d1d4fecefbfdd0176327ec76968522c6fe13c397ad5d58
size 5183030208

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6c3d884107e4d01bac8316cbe755137beac8988cc9c8a9c8806b61f0c4d8bcc5
size 3805778272
oid sha256:2efce51c0d774a47c247c07ec6f109f670e3da4f4eec0f9322d72e3635622403
size 3805397952

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0c0c08d499a7a491f50bf68c6b925a27134a98f6a1fbcfbdeec558297884ea72
size 4027605952

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b8a45c72fee34ade31f7fcd2eec5e7eaafb51d7e1e80fe1762eff1a33e83d2fc
size 5354661760

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d97e40aef60b072b15d9bdd41b171f141f46b45a18eaee257d9bf91daea52e1f
size 5132833120
oid sha256:38fbe0303ac54882c72fd28f636abf731e4bd813809c7a1cffc829980391bcb3
size 5132452800

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:066d5cd0c5ad7a8d2b7407fb53f56858e6736c8248a06111b5f65afcbc709c8f
size 4762161504
oid sha256:3b5cb881500b1f72d456784a70214c8e49b7b3f49dae4fe524ca2c19e650b18b
size 4761781184

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6a0dc0e74df5cd76fb134249f4858bd4610049111482daa6dc1d8479ff283f21
size 4338045280
oid sha256:594a305f4806f982808a07e0271e6e9354e4557c2a1a72383005b41b200830d8
size 4337664960

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6609cc2ebba6311806ee84116af44e7fb26762f116572a8181aad4a5da5c9e95
size 5354660768

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:05dfbfc607fce0d039cac53098c387eb66c9c17587eb8bbf8b400c4769ae0252
size 6844347232
oid sha256:ed8573f0d1e3ca58e471498fd162a65d4f4253dffdec28a7ba394fdcae92f1f3
size 5983265728

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0452e54ba9b6c5b2e466fbd7f28c089fefb7c3dceddcc2b75becab6b16cae523
size 6843425696

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0874bf61be2e4b3d0a4a75e58fbd442dc410745d513c1e1e5de0b54ae33e65db
size 5761438048
oid sha256:13b2a7b4115bbd0900162edcebe476da1ba1fc24e718e8b40d32f6e300f56dfe
size 5761057728

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2ae8828f11ad498fa6fb0c31ccbd5f6ec0bc3b579613f164b8bebc663f9d1763
size 5479305568
oid sha256:61fd8af95257d1e39fa3bab7f4e4f7af65812f4fe017a808913f3c1c7151d33a
size 5478925248

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:381bc6455ee6bd33dfa3987d001678e390019e214a6f3e7fb17e22853a0102da
size 7730656096
oid sha256:fcff2482f87cf4ba5bf0dca01cbde26abc9c77aa0caac0a792636219ce827e75
size 6869574592

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fd2ef3823778e3c138aebb5892fcc4587313549ec3c21b84103090ce4b7617b4
size 6647746912
oid sha256:a4b0b55ce809a09baaefb789b0046ac77ecd502aba8aeb2ed63cc237d9f40ce7
size 6647366592

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4ba2748d33407c999dc1c2fe6845e39eb686792cb5e42d5d1bc20599cfb2bdb7
size 6483972448
oid sha256:ca83ab1d6a8df1f2b82ba2889dfeae77b878386ba60b8f55bad8cd370e2cc738
size 6483592128

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4e000b337787bd1cf2f07bbcf31709e6ff4fad7f383156741e674027d4038277
size 7811278720

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8b36d8f25b92b38a3c193f8e382f3df13699db42f65b124948db74b1f0a6b21b
size 10506446720

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3f38ef3fe66173a34e05b7f356fa0880b3941e64c94618623f4c243e1a4fde12
size 7589450080
oid sha256:70ea6fe1a4a8005ef6a0b379a87d271028cfa1b0be5fa2848e30419653d717a4
size 7589069760

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d69cac05ff85f04207610e19def2e741d0fda43c19e0bf64cc7dfef9f005a2ef
size 8672359264
oid sha256:497f1c006183cd186fb3503d90b80ba9a4518d8f6146a1f8e69615b20511c678
size 7811277760

View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f9cfe355265a0f78c4942e1a6cc6b7113ddb75748f7f1267490abfb34b0abc4
size 10687308704

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:68ef14eb96fb6ab0dfb04dc9d73e4b5ff42fda0011110a160881d20b3d84b594
size 9827640160
oid sha256:07ac9ec01217d7bf2976a3a0dbf564a91b6957f94ab2ad30671eb6100f64bc2b
size 9827148736

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0c2c8fe89751b1be794ce20ae92a764b3fe7fdfad735e5402650b5e40ad5788c
size 10688230240
oid sha256:e19cb326ba857bec47383ada7af3b0fd965605a44e0a3b6ab0b1e080a541597a
size 10687309696

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4f7d9c68f33c338a0d98faf6bad67cb13d39a5d4f3c87965bff4d62620d27d70
size 36974719488
oid sha256:ac2d53301d4e91f0c3a1835df213af9334eaccb08be76afd694ec2101b57bdaa
size 36972880544

View File

@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3ebc87750f8830146ae668032ca7e319bfbf89e4276c1a514aeee1be9e6addfd
size 6116901
oid sha256:40f30bcab948c839ffd9855c98ec76bffb7f5cae8d301d556ee8a96f5832058b
size 6116900