From 1de3ca51da3b014ff2d84720b6934aa073d61286 Mon Sep 17 00:00:00 2001 From: ModelHub XC Date: Mon, 4 May 2026 21:19:53 +0800 Subject: [PATCH] =?UTF-8?q?=E5=88=9D=E5=A7=8B=E5=8C=96=E9=A1=B9=E7=9B=AE?= =?UTF-8?q?=EF=BC=8C=E7=94=B1ModelHub=20XC=E7=A4=BE=E5=8C=BA=E6=8F=90?= =?UTF-8?q?=E4=BE=9B=E6=A8=A1=E5=9E=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Model: unsloth/Llama-3.1-Nemotron-Nano-4B-v1.1 Source: Original Platform --- .gitattributes | 49 + README.md | 443 ++++ accuracy_plot.png | Bin 0 -> 56775 bytes chat_template.jinja | 15 + config.json | 37 + configuration.json | 1 + generation_config.json | 12 + ...a_nemotron_nano_generic_tool_calling.jinja | 51 + llama_nemotron_nano_toolcall_parser.py | 109 + model-00001-of-00002.safetensors | 3 + model-00002-of-00002.safetensors | 3 + model.safetensors.index.json | 298 +++ special_tokens_map.json | 23 + tokenizer.json | 3 + tokenizer_config.json | 2067 +++++++++++++++++ 15 files changed, 3114 insertions(+) create mode 100644 .gitattributes create mode 100644 README.md create mode 100644 accuracy_plot.png create mode 100644 chat_template.jinja create mode 100644 config.json create mode 100644 configuration.json create mode 100644 generation_config.json create mode 100644 llama_nemotron_nano_generic_tool_calling.jinja create mode 100644 llama_nemotron_nano_toolcall_parser.py create mode 100644 model-00001-of-00002.safetensors create mode 100644 model-00002-of-00002.safetensors create mode 100644 model.safetensors.index.json create mode 100644 special_tokens_map.json create mode 100644 tokenizer.json create mode 100644 tokenizer_config.json diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..21b3632 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,49 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bin.* filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs 
diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zstandard filter=lfs diff=lfs merge=lfs -text +*.tfevents* filter=lfs diff=lfs merge=lfs -text +*.db* filter=lfs diff=lfs merge=lfs -text +*.ark* filter=lfs diff=lfs merge=lfs -text +**/*ckpt*data* filter=lfs diff=lfs merge=lfs -text +**/*ckpt*.meta filter=lfs diff=lfs merge=lfs -text +**/*ckpt*.index filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.gguf* filter=lfs diff=lfs merge=lfs -text +*.ggml filter=lfs diff=lfs merge=lfs -text +*.llamafile* filter=lfs diff=lfs merge=lfs -text +*.pt2 filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text + +tokenizer.json filter=lfs diff=lfs merge=lfs -text \ No newline at 
end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2c8a7a3 --- /dev/null +++ b/README.md @@ -0,0 +1,443 @@ +--- +library_name: transformers +license: other +license_name: nvidia-open-model-license +license_link: >- + https://www.nvidia.com/en-us/agreements/enterprise-software/nvidia-open-model-license/ +pipeline_tag: text-generation +language: +- en +tags: +- nvidia +- unsloth +- llama-3 +- pytorch +base_model: +- nvidia/Llama-3.1-Nemotron-Nano-4B-v1.1 +datasets: +- nvidia/Llama-Nemotron-Post-Training-Dataset +--- +
+

+ Unsloth Dynamic 2.0 achieves superior accuracy & outperforms other leading quants. +

+
+ + + + + + + + + +
+
+ + + +# Llama-3.1-Nemotron-Nano-4B-v1.1 + +## Model Overview + +![Accuracy Comparison Plot](./accuracy_plot.png) + +Llama-3.1-Nemotron-Nano-4B-v1.1 is a large language model (LLM) which is a derivative of [nvidia/Llama-3.1-Minitron-4B-Width-Base](https://huggingface.co/nvidia/Llama-3.1-Minitron-4B-Width-Base), which is created from Llama 3.1 8B using [our LLM compression technique](https://arxiv.org/abs/2408.11796) and offers improvements in model accuracy and efficiency. It is a reasoning model that is post trained for reasoning, human chat preferences, and tasks, such as RAG and tool calling. + +Llama-3.1-Nemotron-Nano-4B-v1.1 is a model which offers a great tradeoff between model accuracy and efficiency. The model fits on a single RTX GPU and can be used locally. The model supports a context length of 128K. + +This model underwent a multi-phase post-training process to enhance both its reasoning and non-reasoning capabilities. This includes a supervised fine-tuning stage for Math, Code, Reasoning, and Tool Calling as well as multiple reinforcement learning (RL) stages using Reward-aware Preference Optimization (RPO) algorithms for both chat and instruction-following. The final model checkpoint is obtained after merging the final SFT and RPO checkpoints + +This model is part of the Llama Nemotron Collection. You can find the other model(s) in this family here: +- [Llama-3.3-Nemotron-Ultra-253B-v1](https://huggingface.co/nvidia/Llama-3_1-Nemotron-Ultra-253B-v1) +- [Llama-3.3-Nemotron-Super-49B-v1](https://huggingface.co/nvidia/Llama-3.3-Nemotron-Super-49B-v1) +- [Llama-3.1-Nemotron-Nano-8B-v1](https://huggingface.co/nvidia/Llama-3.1-Nemotron-Nano-8B-v1) + +This model is ready for commercial use. + +## License/Terms of Use + +GOVERNING TERMS: Your use of this model is governed by the [NVIDIA Open Model License](https://www.nvidia.com/en-us/agreements/enterprise-software/nvidia-open-model-license/). 
Additional Information: [Llama 3.1 Community License Agreement](https://www.llama.com/llama3_1/license/). Built with Llama. + +**Model Developer:** NVIDIA + +**Model Dates:** Trained between August 2024 and May 2025 + +**Data Freshness:** The pretraining data has a cutoff of June 2023. + + +## Use Case: + +Developers designing AI Agent systems, chatbots, RAG systems, and other AI-powered applications. Also suitable for typical instruction-following tasks. Balance of model accuracy and compute efficiency (the model fits on a single RTX GPU and can be used locally). + +## Release Date:
+5/20/2025
+ +## References + +- [\[2408.11796\] LLM Pruning and Distillation in Practice: The Minitron Approach](https://arxiv.org/abs/2408.11796) +- [\[2502.00203\] Reward-aware Preference Optimization: A Unified Mathematical Framework for Model Alignment](https://arxiv.org/abs/2502.00203) +- [\[2505.00949\] Llama-Nemotron: Efficient Reasoning Models](https://arxiv.org/abs/2505.00949) + + +## Model Architecture + +**Architecture Type:** Dense decoder-only Transformer model + +**Network Architecture:** Llama 3.1 Minitron Width 4B Base + +## Intended use + +Llama-3.1-Nemotron-Nano-4B-v1.1 is a general purpose reasoning and chat model intended to be used in English and coding languages. Other non-English languages (German, French, Italian, Portuguese, Hindi, Spanish, and Thai) are also supported. + +# Input: +- **Input Type:** Text +- **Input Format:** String +- **Input Parameters:** One-Dimensional (1D) +- **Other Properties Related to Input:** Context length up to 131,072 tokens + +## Output: +- **Output Type:** Text +- **Output Format:** String +- **Output Parameters:** One-Dimensional (1D) +- **Other Properties Related to Output:** Context length up to 131,072 tokens + +## Model Version: +1.1 (5/20/2025) + +## Software Integration +- **Runtime Engine:** NeMo 24.12
+- **Recommended Hardware Microarchitecture Compatibility:** + - NVIDIA Hopper + - NVIDIA Ampere + +## Quick Start and Usage Recommendations: + +1. Reasoning mode (ON/OFF) is controlled via the system prompt, which must be set as shown in the example below. All instructions should be contained within the user prompt +2. We recommend setting temperature to `0.6`, and Top P to `0.95` for Reasoning ON mode +3. We recommend using greedy decoding for Reasoning OFF mode +4. We have provided a list of prompts to use for evaluation for each benchmark where a specific template is required + +See the snippet below for usage with Hugging Face Transformers library. Reasoning mode (ON/OFF) is controlled via system prompt. Please see the example below. +Our code requires the transformers package version to be `4.44.2` or higher. + + +### Example of “Reasoning On:” + +```python +import torch +import transformers + +model_id = "nvidia/Llama-3.1-Nemotron-Nano-4B-v1.1" +model_kwargs = {"torch_dtype": torch.bfloat16, "device_map": "auto"} +tokenizer = transformers.AutoTokenizer.from_pretrained(model_id) +tokenizer.pad_token_id = tokenizer.eos_token_id + +pipeline = transformers.pipeline( + "text-generation", + model=model_id, + tokenizer=tokenizer, + max_new_tokens=32768, + temperature=0.6, + top_p=0.95, + **model_kwargs +) + +# Thinking can be "on" or "off" +thinking = "on" + +print(pipeline([{"role": "system", "content": f"detailed thinking {thinking}"}, {"role": "user", "content": "Solve x*(sin(x)+2)=0"}])) +``` + + +### Example of “Reasoning Off:” + +```python +import torch +import transformers + +model_id = "nvidia/Llama-3.1-Nemotron-Nano-4B-v1" +model_kwargs = {"torch_dtype": torch.bfloat16, "device_map": "auto"} +tokenizer = transformers.AutoTokenizer.from_pretrained(model_id) +tokenizer.pad_token_id = tokenizer.eos_token_id + +pipeline = transformers.pipeline( + "text-generation", + model=model_id, + tokenizer=tokenizer, + max_new_tokens=32768, + do_sample=False, + 
**model_kwargs +) + +# Thinking can be "on" or "off" +thinking = "off" + +print(pipeline([{"role": "system", "content": f"detailed thinking {thinking}"}, {"role": "user", "content": "Solve x*(sin(x)+2)=0"}])) +``` + +For some prompts, even though thinking is disabled, the model emergently prefers to think before responding. But if desired, the users can prevent it by pre-filling the assistant response. + +```python +import torch +import transformers + +model_id = "nvidia/Llama-3.1-Nemotron-Nano-4B-v1.1" +model_kwargs = {"torch_dtype": torch.bfloat16, "device_map": "auto"} +tokenizer = transformers.AutoTokenizer.from_pretrained(model_id) +tokenizer.pad_token_id = tokenizer.eos_token_id + +# Thinking can be "on" or "off" +thinking = "off" + +pipeline = transformers.pipeline( + "text-generation", + model=model_id, + tokenizer=tokenizer, + max_new_tokens=32768, + do_sample=False, + **model_kwargs +) + +print(pipeline([{"role": "system", "content": f"detailed thinking {thinking}"}, {"role": "user", "content": "Solve x*(sin(x)+2)=0"}, {"role":"assistant", "content":"\n"}])) +``` + +## Running a vLLM Server with Tool-call Support + +Llama-3.1-Nemotron-Nano-4B-v1.1 supports tool calling. This HF repo hosts a tool-calling parser as well as a chat template in Jinja, which can be used to launch a vLLM server. + +Here is a shell script example to launch a vLLM server with tool-call support. `vllm/vllm-openai:v0.6.6` or newer should support the model. 
+ +```shell +#!/bin/bash + +CWD=$(pwd) +PORT=5000 +git clone https://huggingface.co/nvidia/Llama-3.1-Nemotron-Nano-4B-v1.1 +docker run -it --rm \ + --runtime=nvidia \ + --gpus all \ + --shm-size=16GB \ + -p ${PORT}:${PORT} \ + -v ${CWD}:${CWD} \ + vllm/vllm-openai:v0.6.6 \ + --model $CWD/Llama-3.1-Nemotron-Nano-4B-v1.1 \ + --trust-remote-code \ + --seed 1 \ + --host "0.0.0.0" \ + --port $PORT \ + --served-model-name "Llama-Nemotron-Nano-4B-v1.1" \ + --tensor-parallel-size 1 \ + --max-model-len 131072 \ + --gpu-memory-utilization 0.95 \ + --enforce-eager \ + --enable-auto-tool-choice \ + --tool-parser-plugin "${CWD}/Llama-3.1-Nemotron-Nano-4B-v1.1/llama_nemotron_nano_toolcall_parser.py" \ + --tool-call-parser "llama_nemotron_json" \ + --chat-template "${CWD}/Llama-3.1-Nemotron-Nano-4B-v1.1/llama_nemotron_nano_generic_tool_calling.jinja" +``` + +Alternatively, you can use a virtual environment to launch a vLLM server like below. + +```console +$ git clone https://huggingface.co/nvidia/Llama-3.1-Nemotron-Nano-4B-v1.1 + +$ conda create -n vllm python=3.12 -y +$ conda activate vllm + +$ python -m vllm.entrypoints.openai.api_server \ + --model Llama-3.1-Nemotron-Nano-4B-v1.1 \ + --trust-remote-code \ + --seed 1 \ + --host "0.0.0.0" \ + --port 5000 \ + --served-model-name "Llama-Nemotron-Nano-4B-v1.1" \ + --tensor-parallel-size 1 \ + --max-model-len 131072 \ + --gpu-memory-utilization 0.95 \ + --enforce-eager \ + --enable-auto-tool-choice \ + --tool-parser-plugin "Llama-3.1-Nemotron-Nano-4B-v1.1/llama_nemotron_nano_toolcall_parser.py" \ + --tool-call-parser "llama_nemotron_json" \ + --chat-template "Llama-3.1-Nemotron-Nano-4B-v1.1/llama_nemotron_nano_generic_tool_calling.jinja" +``` + +After launching a vLLM server, you can call the server with tool-call support using a Python script like below. 
+ +```python +>>> from openai import OpenAI +>>> client = OpenAI( + base_url="http://0.0.0.0:5000/v1", + api_key="dummy", + ) + +>>> completion = client.chat.completions.create( + model="Llama-Nemotron-Nano-4B-v1.1", + messages=[ + {"role": "system", "content": "detailed thinking on"}, + {"role": "user", "content": "My bill is $100. What will be the amount for 18% tip?"}, + ], + tools=[ + {"type": "function", "function": {"name": "calculate_tip", "parameters": {"type": "object", "properties": {"bill_total": {"type": "integer", "description": "The total amount of the bill"}, "tip_percentage": {"type": "integer", "description": "The percentage of tip to be applied"}}, "required": ["bill_total", "tip_percentage"]}}}, + {"type": "function", "function": {"name": "convert_currency", "parameters": {"type": "object", "properties": {"amount": {"type": "integer", "description": "The amount to be converted"}, "from_currency": {"type": "string", "description": "The currency code to convert from"}, "to_currency": {"type": "string", "description": "The currency code to convert to"}}, "required": ["from_currency", "amount", "to_currency"]}}}, + ], + ) + +>>> completion.choices[0].message.content +'\nOkay, let\'s see. The user has a bill of $100 and wants to know the amount of a 18% tip. So, I need to calculate the tip amount. The available tools include calculate_tip, which requires bill_total and tip_percentage. The parameters are both integers. The bill_total is 100, and the tip percentage is 18. So, the function should multiply 100 by 18% and return 18.0. But wait, maybe the user wants the total including the tip? The question says "the amount for 18% tip," which could be interpreted as the tip amount itself. Since the function is called calculate_tip, it\'s likely that it\'s designed to compute the tip, not the total. So, using calculate_tip with bill_total=100 and tip_percentage=18 should give the correct result. The other function, convert_currency, isn\'t relevant here. 
So, I should call calculate_tip with those values.\n\n\n' + +>>> completion.choices[0].message.tool_calls +[ChatCompletionMessageToolCall(id='chatcmpl-tool-2972d86817344edc9c1e0f9cd398e999', function=Function(arguments='{"bill_total": 100, "tip_percentage": 18}', name='calculate_tip'), type='function')] +``` + + +## Inference: +**Engine:** Transformers +**Test Hardware:** + +- BF16: + - 1x RTX 50 Series GPUs + - 1x RTX 40 Series GPUs + - 1x RTX 30 Series GPUs + - 1x H100-80GB GPU + - 1x A100-80GB GPU + + +**Preferred/Supported Operating System(s):** Linux
+ +## Training Datasets + +A large variety of training data was used for the post-training pipeline, including manually annotated data and synthetic data. + +The data for the multi-stage post-training phases for improvements in Code, Math, and Reasoning is a compilation of SFT and RL data that supports improvements of math, code, general reasoning, and instruction following capabilities of the original Llama instruct model. + +Prompts have been sourced from either public and open corpus or synthetically generated. Responses were synthetically generated by a variety of models, with some prompts containing responses for both Reasoning On and Off modes, to train the model to distinguish between two modes. + +**Data Collection for Training Datasets:**
+* Hybrid: Automated, Human, Synthetic
+ +**Data Labeling for Training Datasets:**
+* N/A
+ +## Evaluation Datasets + +We used the datasets listed below to evaluate Llama-3.1-Nemotron-Nano-4B-v1.1. + +**Data Collection for Evaluation Datasets:** Hybrid: Human/Synthetic + +**Data Labeling for Evaluation Datasets:** Hybrid: Human/Synthetic/Automatic + +## Evaluation Results + +These results contain both “Reasoning On”, and “Reasoning Off”. We recommend using temperature=`0.6`, top_p=`0.95` for “Reasoning On” mode, and greedy decoding for “Reasoning Off” mode. All evaluations are done with 32k sequence length. We run the benchmarks up to 16 times and average the scores to be more accurate. + +> NOTE: Where applicable, a Prompt Template will be provided. While completing benchmarks, please ensure that you are parsing for the correct output format as per the provided prompt in order to reproduce the benchmarks seen below. + +### MT-Bench + +| Reasoning Mode | Score | +|--------------|------------| +| Reasoning Off | 7.4 | +| Reasoning On | 8.0 | + + +### MATH500 + +| Reasoning Mode | pass@1 | +|--------------|------------| +| Reasoning Off | 71.8% | +| Reasoning On | 96.2% | + +User Prompt Template: + +``` +"Below is a math question. I want you to reason through the steps and then give a final answer. Your final answer should be in \boxed{}.\nQuestion: {question}" +``` + + +### AIME25 + +| Reasoning Mode | pass@1 | +|--------------|------------| +| Reasoning Off | 13.3% | +| Reasoning On | 46.3% | + +User Prompt Template: + +``` +"Below is a math question. I want you to reason through the steps and then give a final answer. Your final answer should be in \boxed{}.\nQuestion: {question}" +``` + + +### GPQA-D + +| Reasoning Mode | pass@1 | +|--------------|------------| +| Reasoning Off | 33.8% | +| Reasoning On | 55.1% | + +User Prompt Template: + + +``` +"What is the correct answer to this question: {question}\nChoices:\nA. {option_A}\nB. {option_B}\nC. {option_C}\nD. 
{option_D}\nLet's think step by step, and put the final answer (should be a single letter A, B, C, or D) into a \boxed{}" +``` + + +### IFEval + +| Reasoning Mode | Strict:Prompt | Strict:Instruction | +|--------------|------------|------------| +| Reasoning Off | 70.1% | 78.5% | +| Reasoning On | 75.5% | 82.6% | + +### BFCL v2 Live + +| Reasoning Mode | Score | +|--------------|------------| +| Reasoning Off | 63.6% | +| Reasoning On | 67.9% | + +User Prompt Template: + + +``` +{functions} + +{user_prompt} +``` + + +### MBPP 0-shot + +| Reasoning Mode | pass@1 | +|--------------|------------| +| Reasoning Off | 61.9% | +| Reasoning On | 85.8% | + +User Prompt Template: + + +```` +You are an exceptionally intelligent coding assistant that consistently delivers accurate and reliable responses to user instructions. + +@@ Instruction +Here is the given problem and test examples: +{prompt} +Please use the python programming language to solve this problem. +Please make sure that your code includes the functions from the test samples and that the input and output formats of these functions match the test samples. +Please return all completed codes in one code block. +This code block should be in the following format: +```python +# Your codes here +``` +```` + + +## Ethical Considerations: + +NVIDIA believes Trustworthy AI is a shared responsibility and we have established policies and practices to enable development for a wide array of AI applications. When downloaded or used in accordance with our terms of service, developers should work with their internal model team to ensure this model meets requirements for the relevant industry and use case and addresses unforeseen product misuse. + +For more detailed information on ethical considerations for this model, please see the Model Card++ [Explainability](explainability.md), [Bias](bias.md), [Safety & Security](safety.md), and [Privacy](privacy.md) Subcards. 
+ +Please report security vulnerabilities or NVIDIA AI Concerns [here](https://www.nvidia.com/en-us/support/submit-security-vulnerability/). \ No newline at end of file diff --git a/accuracy_plot.png b/accuracy_plot.png new file mode 100644 index 0000000000000000000000000000000000000000..a4ea803f236d037d1d484a8ecd312dc5a6c40892 GIT binary patch literal 56775 zcmeFZcT`hZ)Hn(;DzSlJLupC^(wp=?pb|nynpCAkq!W4v3!+E~T}nU^kSe_w6+t>m zClrw;H4;cDA@DAv&Wz6Rt@p?8y|vzYA8WC=+G})5fPY9CaS&Ub_9}aS(`j z(BM?BYat6KCn-Q-aI_#9mm=igH(MvUTj}1}E`F9mFofq+MuQ+l`Vm3JHzxKaAF}S8 zw?8FEarE1VxVJOuN9pC|{U{16H*v%?>JWv+u$Gek`kmbZwzlb)D2jj}ny|?Xl-5a^ z2cbxYw95x5Zk+xy(Ai3(hLU`XzXX-Pk5favF=o$k5>3lzU|qR$jr!aIw|w`H6<;pqQ^}P)B4%u`}NjM@GC=$U`SWL zpt*;;Ulsp@dDOdY4%|F zc8FB0qO7REmD^!Ln-i3%q!flPlFaX-H?Dp$gYQTM#$mVXQqaTNJ-b?b7??p~CS@nka0hRFRsix^l@Al^V@GlSO?a{UOIponECXeG9F)8$y3=#v4K5UlCqlHv% zlfWGUW*{!J<296wgZFGH1U-i6d#>{L+!w#(_s8u+PXlXsnV!Nv$MZfncxE_E!B5sj z7QC(goFGcYQPx5FbmEfU#sNH=TRZ8CGR2+F!*X+PYSfW@6wy)?&m_cdYtH0SOOxMF zTuPF3+L1f5Hx7`GB|q#`&`Y!bDszSE=^qYhUHnJb-r!%K z&pn*@#wDJT^fv9ApcthIWcID+H<5~C`47Y2s($m_kQg~i-M}~DbV?4Mc1rWvWx7+0 z?-dU|f3CT5`Q%-mYzm#n+1L7B+|*3YlFXv)az^!PPdD40>m{NR$2#N-6br68 zT-8erPfhGT`_aAo-dBM&(PQ*49t$*ynd!$XsessyMul^{XLx(%Ht#Jjfp_uL`rNuF zbHYpCd6~a>4Ik7Q%o~&*JY*|bc0uw!WAlV#tC)Eb9WPCDljUc;({#m)3RDHt2>&vx z$&rG<@<#73FIGU?*L*nY&Wu0Ay^(EJr?sG}0w4Z^B@pr9;;EA-Xm3Kwsovd(Kj+rO zd{sOKV>>BI)Aoqah-kdt=&q%ubyJI|g-GAfT6o!-9)>WEJRiNKh-Rgaj88v@jxTxY z65p#(aX@`C+c^sTnsm{-x_JUx6uyoi|6h~v?!F{7M@T;4iviYM4J*=H{{ z$FxOBvbf9JKYIN9u6DwW6Rvaw76PrSJ`+~~Ul&>(%@-}pHV%``V97MgqsfoR(|F~m zdrR)*%dE@~S}K~{T2g6)nH!#g!=NV6SP;+A`Lk=>@ftU?rm`EgIW_KT%Tsu$6V?Y%CXb-wew&r{l`sPlYxj$Ap!^Fa4nwbF&28+~j2 zr)-u77C$BRei}H^Go00zO3+WtQXL2z;QhGV&D8U;du6~NPdjr}E-n=@pxQf{hkYfP zmRWE%t=yo%z(LUzj(Lg6G0oil5McZH_Ulin!=Ljn_D9&q+g3DxD?4dxVIF znc!B*zT1SR;lXFf+JUW<<9$uzR>i8p z9eRe6hLLu)OIgWb#<|8Z#m^|!+d-HT0;{!Kek_wp1TzJmnIGN>{{G1@R* zKeNC$!y{AS@d4fzvQ#o|-HrTmyuN1=PcftRYU(E5jWl zs#E=6Mc%lW<=-l+elb!za&)9xYD(&@6sJ^*$6Zf0kABZE&moUI{N42?50V$|+cNT1 
zp;+$u{`m0ZuJf2DbE(qs^z zu|8ADhWCnAJ|&wCJAamu$@j9C&#UH7#)x2Z+uL~at=KufHWN43%2s^zG*VAP_qAS9 z==;WNjr~o2w+|ZdUou}SZhW!AbA0_PZgbq-u5ehZZrYHt{PWE;&s)|Lad}C31AebJ z(iVeX*{xu!9@{=4+Be&uwZ&m4iYAo`ZJq2_y47}*RbeIQAoO~*?qb!=yT(-xqE6)= z>v9RE28c?vO(LG}bja!282gyxI@vmg*=$6|^|eLUQNwz5JJX4}E?K;PP5HQuzjmpK z>m21`VEcit6Azgew?lTcJ1)j3JP591I>W#uM(ZC<2YTjKtO-3X3{0q|OQENnj zb#op98-aA;)P*DA_a?tg^7)04D{=R6Az@K#QG`qvbsb@?3rC-&W+Y`4&NR*h#%9GX z_sC>Q-efeq;4eU){zl&+6oWk7d8YIv7C)TPIh}IB;cQvG1jf-X%lvBdyQ7h3mrabE z3m4G1mt1kue5PM5TvDpPxJ3i+~2IAtJIKP-u!z)(xR zJ@;fH?rmID0p353TsvR-WDP{4Y#uIpoYBvLTxTwIXS*f1mpY2> zAajf?)28BX@22>d)zy|E%V<+jO`}9DajVznB8W7ef@yjW=7dC;JNBr>cg#6 zRVg@uXDSNHU~396@I(pxo(Fy@C=Q?>QXB^U(gMGVsi2=<53!^k`1uUl-D@bXtpJAu zf3?k>EiLU)cN|=vUN;N}h8nQGq3@!vsv=?TV8?sQ!r`_huZNxEUKa`}4+-GW&eG); zi-(=9Jxanu`qG~*B!K6=*L;^){%qo6BYjCn_CW;Ed!G5EmEc;}_%;6yyO~@Sr^HU2b{s*rQm# z5At&yMN5>qv$dm(wSzs&-nh4JJGi6i&Rup)^dH1i?W)fBswj-a?Ml@2z9HFW-94t zP0DYlcuODXC~^3oKQT4w<|gkX^{u!p6t(d#bdAlOrUZehXjtR|D1Lk3j{xa_E=ku< zp%I6iDI`??)!;{$HB5v4t}8tJKL-KY1j?U{2spP!-|N56MBTw4S{h$5jw;1{rZ{9Q z2fE#j)Yygm7o2hda?D4R_6Z9hOh5s7RryoQO^mi&KSF8M{o#fO4NKf(4<9&%<82bH|D%EkT#;uE*rEF_y@$_K%+GX1=QwN{4HbH014Zw8Pe;E{=L@eG zFmG$g8$SZjZm`zdedzTqg-S9JJ1ZOwL&Qd(=F*Lh;nItJ|Kx1fg0#pbxzCcIa@!=% zfw3z0F^0C-&=~Ke*RPYjM&5eXH$VJKitn+rPzrdUGMfKSjdO{#frE0YjUrA&&gR5x+EJ2+>23wvC!^y01QtQTR~!=u&i4x0^!Wi-O1iG$9a z;K#z*IkYG}cxbv~X3RW8=$&p0q!y_v1T14i0RE03;6lg8HTNZ8G-fqIj=t-gZT z!_r^nr-66tJF|&i-9faYmlWTg0jrJ8_jKm0aH`gTRuKDHxUU@YX#bG zcI~ibF;6du#!7n=+r6{kH=!q;vShZqcDL1AB*yD}y>;BnWXWqiIfODV!hDh0n;_3} zPd$;5atG7RPjPiOD)Wu2%W`j)W+(aYY))UK_WW?NpfJ>iyhWHEL3Q!4r+9pkuXZQ& z>BXu0;iD`U`twNTZRcetLxoxun76u}=U-(m`^!+<*An{C>r)ku#3AJPMCn%e1&5l# z?BbpLs@(9&Fv)z06pQ10H%qdHG5Hl=9=C34hI{qO&vN-b(Ib-Du{`A?T1%{=BaRA{ zOJ&$r&)HdkeZzPBYhN{s zcUst4GWOZX+7RU|t*kP($9Ku5t~GvZFPM#&>QQ!ylL%rQ!#k91JMu0%-Jo1N!^$or z^yyO_*D1mt!xZeK7@LNbV|KS^+tz)@*JFx1C2WXot2>f2l5UGx!-(o+&R3$79fjOB z`WL?&!iU$*CSK;@-}CvcsO`oxrCdA@`O_w!GY;fa^D7!DBV=$=MnW4e?urT-FZ+Hu 
z&?>E)Y;Y!=Sxh5E?hR(U1rrTkWBwL!_(mYXZ5q5uX9RDU(x7Z7uDi}Vf#UpE&iZSP z?e47W`DPt>3sE$68?w~-HWf(i^Er8I^c4{~u-YVASGWy8@6Z<~K`-c?sJ5M)uF+JE zn(cJPkVY~YeAnF3N}J_l!7^)UoacNByhn5BR6DA}z<#?k4m%m!!1~>SV_D)OmZAOr7-hV1M(=`XNq+iv%85wpc!1h zt;_CA%4Q3@w+l4i*hxOtS-S4;v|E9_S;UT|5hei@ofS!rBb%yS&n9r zX&q@4mmgHJUcXD8&E2vwCEeT%_TLERj8EKDdM(^i8Y_iQ&; zM&C=m{7SUBAh(B^{E>M{zc%@6qQrL>Hjwq~*Y^5zVKvNme>ii8lvpUE!HTqt?UO^W zg6T|hDVU9pMlHfQKZgQ;GpG}SpUxeHisRjbIF^u6nH}I{+B?dQ$b1%uVO2T)z%+0O z%E~Kfm;*)Nv76rP6_rdySJ5)SWj~ciuL)!O{Pon!cB1O0ONn-?H$er(bkvocmtfd_ zb0t~9{$4{_J-gm5_4#~~K%8k=+c~raJm7K*1}Qa3uVd`#Ace2y^-TKg8A{h6q2XPM zBG+wqdiM;}h+tEFtEJR#Ukuz#dTCm2+SN4)=wtjMQntECB~^@)03Kx8`4$`pzDBBS zlt6~>ws&o-xazt!MVqbd3BR6={q5Vg{a;_1?5uj^l#3leHN}@kSeVSXx={!dnj~;} z_Yxp7Gr1(O+Rq31vO4R!hLvcpAjM{n#7k(HlqZXzX4V$VdJ1f#2Ihq$>V@h`e6!GY zD66fW6k-Wls8SnmL`wxZZ|d5o(^nVAMTNej{SSI>v-mVtIXS)nlmqW-rG4bJxydSL}AO4EqW`&VMsb$N70&NK|E$5UFxl ztP>Rz%C|abQgouo)Y7=i4dLs_Q$P7^JRy8Le?Zk(kJy%rX}=XVQM`^>;~!) zeU08Qh)}EWld~=mrRT9oNQ9{c>rKg~B-p)V#yVnuW@EO=c9a|om6aTKSsREOv zlZwUYPQf)Um%mi{{H@u4yE#WFl8#%Qf(MCUry>K|qxeWXtUG?me|gp~e|t@R*Tt31 z4Gn?3?k%_SoML4*Fxhx}HrA+vUvD##c{7qTAPOr0Q@VMWbHNTZL&lJYFgk&sIub?W zk|LAM)aE-9o>wNrgIebdLH8h^VINjMhWnLBm=gdsX|h?!Oe*A*i%Qk%9Ba=LbQW1a z0ot6iJ7IQbp?)Vv`C0nYP+7{Uq>G6wb&HmKIr*112mHwc^PCUXIF_0(>m98jmKNkS zMNs&Gbj2k}X8}XfroP>%KElxu?KI7e^I)Tjg(}(`pc}Lqr<64~khXh@ZF9$v1r|ZJ zsVCa>(T^1PlNWLXrb36o^`FKOyp`!k_}S%RJX`vXCJY$eillG~>Z-JBT12NBb+(&~ zDzmkhb(MMY=EG;8G&=0&R)=ko16V%)Ow1gL@k-q|d0Ef-Li~n>AF9kP=z;&*b^jZq zY;0JzEWefaXZ7?brpkE4qr~Ljd(8Ip6sj>A2#F_(+wd(&%fQ{HGSyB&3u978M{$1Z zoE{s9`ZYw~v(|AA__g$<+{u#U`|9&u1}h1PiwJ|GLtUQ+ph8yTUo!GpDHn5FlBA`p z(`8@POVh_-&%w+L3#v=>#)G(!UzxL^mkkQSZ7)oM4JWr^3VdD)dJa^N=^W(iDKH9I z*<1gMqrf3f^KD|q4pyd*butk_S>m139&ubLTFwvjjZoam{=3trJ1?Kr z8^O?uDoxSi?S5#(rascmFjji?etG3E(=L11g5d5_rzg+OCY+}7L^?88Am_jgfuYYv zIB@+QOjrxY5^GaXTUN7V=a7R=ne)$YXhSM5I*0l-OO9zCY>B$^Xx*HdG#5c{b!w3WxzJEv;bJ|S{Q=Ik2L-Z`^7eXqFQ z3_!EC%sE=@m~ z0QjwQtQYNZ$R8PX;7XGne4JiO=*YFXf}Qz->;Q+LD-bKTlTpQ%JpQ5ieGr=Q+(~YG5oMX}4-bS_&?z 
zLlKmNp1_|$sbut8wz{6RRu8B<69Q~m-Ro0SD`sa7qf4@-JqN#U#RI*Z=o%&7BJtRIdBrrbpR8WQ-ux;_ z2h(T~)Fqx`**R}mr%ZK93D$6wuKv z4DU&Le$#rKlSVpGP{p#Y^PTF%7YwLXv6eiB98`6|wuPT_t_>f2$^z3>D2&o57Z`9) zni`BH^O4q#nj48;#ip}j$)%B%h}#!^Ro{xOCN%1VrO~?+A3#m13mN+JP$P~q`ZU|4ZAVEQ<>s13fBoysD{4U$Y$gXXl zuvvHGPnmIfv2`ajUJK%&M{LEfv&YA!YJ$oj-(UerpS_z>c3?D&PkIy0Z0D6f%eqQWbEv-> zZe4(6&9u*g)q289b}O2t^QVrHY!l`5gAxvN!XG!X%jcBDq`Dg8r+W)LJ8ZJ+8hKv2 zsnBr2CK1os;lWTUT93ylwD6F~q{_F;ZV<)yY$ukGhu8z2)v>WWlXDpptxhL{U($vm z8aR>NR%)~sc9@(Y+`??VP?y0Zy&>Ad00}uav-q_pmcpV2t^dS0MIC+FMi><}YfjUa z`(p8^r~ffjnG3BGM9hk8kwUlFCZWP2sdr_{7H1TLuA)1qg63Ssl*X}`6CZ#r=%A{p z0(v6pOW@il$F>Cy&m;@^{y)&L97DZA3*Ypmc0J|!j;(ebNjQZCsr?q3#-OXxRfP;| zc6H`lD5_1*?@gevR1T_iB`;y8os3q!hO-9lrl^n3T@PxSQ^*G?zh%X;8$_;K?5g-9 z%Q5Taaciu4G=07ww%f}s!c4ZmoYfn$~59FtqjbxBhvM*wg^nr zxfUZUQ_?HU)+^5LWSLp92r_%Vc(sfP{8GBbvh%TyVI_X?=EvK~uISmKYg%}tev7S> z)uDXN3As^F$Jt@o9MKcq{(@cNV;18|-=yq0#LF6aVCEf#SH>V;X5-!796Wo&znPD8 zwa)uu;4Z^1o<3UmRDHYkl8H_~5wl^LH?@KBLWA3CtYn4}{iZYASmsu#)hMV^*a2}i ztMPWP774ht!D>P?gMv_I!uaA7@#U%bNZ%bZn4bQW5Fey4H_vAo<`lPuzd79M75bie z`wjCAl19IJQv#Voui3J?FV9?oHqA+eaZ?2`ZWRYL(?$}bN)k$w&9!9A(Jme1o4KK$ z^E&&A3K~k4T#P`qMJBZ*HvB;{=!cwwZA5%l3px*xfqn*rhcOGz&q|q^q8j z%KqLc7VS<&wOzU^2$m}gEN>>jgipKCQ1O(}D)P0aXB7HDUn@m@>*`tF+PmW50G0$p3JMzGh6*mmvCCbe#1 z`K}~`r2C4r^akUvMC}I3SKwIqZA&I0B;*F$9gHjeF8EAOaCC2+TX7j3i%K$td*&Dp z+^OxH6TO&YV7h7l0E#HW%yQlbuPi9uDqW%6iIVfQr;G6wf`#0JO3!c(;ml)wFOn>c zi)7hX(1r}t*(qR+s1d~Omsl;_J%aa%HAy{H3k)1209z1xGrt+V7zL4<%?=KgMV(zG z8aBQhcl|6va;DW1*Ou*v>rS~V_p34p$$CmRKU}n-_qR8V%E{0|n<=3=0#`>aUDiXt zm-bZ>EU$a18wQ6rrHmhWjFjy<-x{MUer9SulsI)uXN6*iy*UPYy69TehqmGNXKG&x zWXdd;dYiv(G>f1#NXYTFvY~L)v<4B&uuxpk7m|PA<5Y#QsX$~`4fHXRo#n*Z*iaX8 zw`)bR`JRz#)P0AY&ZKD+zaG(o$I`BEoHBA!H9z}<&M^bJp~EM1i!$)yiB6WgA0|Tq zPngC+R#|Jc!@&>L=VuR#H6K_+n;Ykv>oa2)?6genFy^{pk}F}z;ZT1WNi3bDr^(MK z6#@ucSIqR>`IGzQOd(Up7Qt8P96#iR(5AMWJ_Nz{$%hQdKLIn(vt09Lxg;M4evJ}w z>ts>mpZwHs_iXq81M0YpD>ObCYn~nkK7=|N1%6H&&{F#a)#L%X4Eex5_^ja;NPX`5 
zXk2e{^OA2X3Q`A^iz&9ED{0m5xyHmknCPZ8OiBuMRMPXvEiq4_LzkJOBZ-mM*bb)hhT(t_(dHBb^I*T z!&_N&k#f@r9+f(S+gZlxlGS}73^Rn>p>}gn?GPJMf&EnkwhSuf^*YC(=!|op9Qq&! zya~o3pa*}>Z^ManZ_eU1FkMkGtlvzmH!z-v#I{Tk1>h>_B{+WqX=W=KJ7ck!@FIFm z`VDzAngQe7mTmMg25Vs{`++CAC}n}n)m6%@jd%AOMhqj2;WwbENmKGobPJ(Es1y+` z+AE!fJfyz){%39D2~XNp(aeM`c%r!4bZIn{fp)vYyO`X zkGfg5d3pHC_`Qhx`e;|UV+XGRd!_P4J>hAO3+uTPV&%*|$a zV%4YLL^_>b2E_&O)BSsN(qIR8x786wHfv1A3Dw zY&v&TzZIRZ&qx8^K^^k4PmD~p3gWJW!9j1AUOXrc_$JTHY{EMDHC>V_nicZgXf#63lUBJ>)K7UUSq5n;V?zPUHSHIh7Kn5T%aKNurm*^y2?v>v6Q!e7kmw3E~C z4o&Z>1hFje*+@1&SoEe>C<{R{R^*LKlH6&ni<{#Ki2>IAUR&C6;NVHqIImiEjU@B2 z={%gF(l-%!mO`oFK;-zGdy^^iflq$(Aqq*fQ)+KZc!$c1-HJ@Dm{l=?xH(f>AtKcJ zcKocCJ7_ThV`O)Ooqb_zYsG!Lcn3=sYED?K>a{22br$&&qc#}A{6o#7OQY^P4|m2~ ztvqi@>=Peid$YbsnnR%7HW@Z_ZI;;H#Fe3ZRDFlki_gqz7o9*cwAZj%C*{2V6b!9G z5fp}1lHqrnpypj$owK)zZ-_Z#Gd(6G5qKkV$!fIr%jtrwFm{73fp(8#oMqx7wRlEp z+&PO-ag*usLvZ_-yPl@#hvNOv%2;0@-Vstda+kB5Q>7L#R=73)I^CruZ@ zZy*-@kj_KZaESm77~XOB9)%oXXNeM+X)B{+XpggSdPl5W*U+$}%-4)vLr{su=~;Su7#@Bj{3SMo}0UMiruROC2O zf&+2o{jP)_9N2|UrEO5K33`UsRy}|plAn8TW9rEO3K0c8h|X=%fe3WPKn$wk$EK8H zazFaW+ks*L|0ECkE$RLZP&54bbZzAWgM6b8?mkn>kCI+8?t(Eum~B?K{McisVZfX? zCuWo+dlfU$g4>5W65ja;ptdkv4EV8;+c0@-jcgZtLWJ4vAS|3MX}sz|SDNw+9-{8? 
z$LVisL0pi!wIw;2U7yYKF*eH8EkvqGd-n4$!ly9Wt?QbfK!Wm&-F+ zNqDa??Mi8F+H*^mZq=LKZbi^8%M|A)ub~_ZXmbspB40njrrZfA%9$b5vR@uF2nVIN z+ZW{Lkln)+=8*kYhd(%7v0+8U=d@~F_m;M12cDA+^u|f#OniK49}c!~k)&8xQo##9Wa0k`f#wmPnbCh1;I^n&p3n3_KWe+Pkp1OozMlgL zCpJs2)GFA?4_aj2gh$9Ot43#2J}As$m_j(a8r zAj(u<^8&UYgABN`MRv3&JdN1-@M5@v1Aojpkn4Ii4=3Y&f>>V3+D%K-U-y0TsD_;q zy*))p8cLYO@03IqtFF4w=bCnjEEUXhuk~UCGW>@`%-g+JZ*IjeEdYKrj4-n}176p5 zA2U3>wCbZQ-Be;wo@j?fG-*{2xFSR6w1<@#J!P zxz?I1V@|=SxC}@xqzFaW)R!a=&~4|M77WOZVRqOfFn+x_RjdJX!i(ORGQ9iz-Gb`# zMhmiY3MOsw^KPp*js534N%MqwJE8vV{5`fxobi~XE`)1b-xX8Qs?swvT~=upw}WD+Ji zR2a6ZMVfW8y?r5bI~lWKH99NA)m$ajX_V2(xl|HYG2=^^HJa}t5~Qqnkcl;Xu6;1O zSzk}TeDM$7MXLSs^PMXfo5R=2$LfbwwcKmN_fj`@J#lMXZk|c&z1Nn=aIAnz^_+Qr zfRgNL-8SRo+QsIaje&xf9bPBC#LX^jt%|{x`kQNd4M_tygzu35?0E&9z}+8ZV+ZyM zEgZEo-hD5$*a7Vlr`=|c3Q3akm76uI;WNQqYK)zKSmkqEa)N?yD!U3P#h^QkP%BI)qFb|waA1^VJh^5Vx;nzSX1Rle8h z^yp0J<~V$F6;)eb>wnmrY*tV@AaFUHbW19EQDI)9yQWK+J=xYfe~Wr;WT_h2s^>6f zm|sZhFbffu2XCc5%h)34%H+lq>I#Mgd6plta$bMc{>Rmbz@UM_o7bLR|`sGSecYfsXa8;$g6Xck@iuB zX;a5WlL~?4cd^rI&zv)V0e~?gd1cLTb$#|WDwo8c#H4%4l~|K=6WE}7haX3CeC0l9bD}@I<7!o)~&t%X*q4n z<@Eq=-X?gyVBiP%JEtN7zIJ(jwDR*cGn#CpAJ)KZxb z(oVJ7A?AziW_z0N9c-Fe-BGiP@_sSP^p(KlBcW5^MdhNvGSHT%FQFUDd~KN*XI;&z zDO&YX?PrlupbQN#)jfFk!S7S41{NO76H<}%7JX^&rZ))!(n` zQ2{8MJT7nZ@VkcpSKVPKx?yl?YU=XSf&E6H2s{MdtDQQ3=NjFgBJm%PG+c&M9PGK= zQC3L%uNl54DjtHNEa!vzkA1f(|3dT}3_w%!nVmW8U!T4YcpRc6cLkhB|C9E=jl-`D zp!wnown_OvpxZSv>v9`Rtt51I!x@_`R$@H&AxPO4j^d zIrJY~$6wyiU_==?^=_{w1z$B#n?K^UkVm>+>_^^m1_H0q<+V$8)7qG=8=kG&DO!64 zYB9F5($dB$t4XubXKTM+ovvEQ=ZZ6OhI_u}GSMV1k5=!Jnf6&gN>K9IbC0-K>}yt< z@Ihy5QhmPAVJ>6K>~d(23coiT-|BY;Dt)5e>q(=QLq(Q4L0^OmJXI^dy*=7?U3Tju z{tdO?Yae_RhMSI#ZhFrh_8ePH(c`V#_~P6|k6f$|dY<3*Lu3D&!&8Mo2b>Nz#^{+C zs7b9f&bK}sDp%iH%ak>Sol%n}^zK!fML*%K%JLyEnDW^S5+LZto z*{j4D_7{FPmx(d4E4Lr5awZc7^z>x7jVfLbOX(@?7357xvRYYL&2w(E{d@TDZzQ<+ zQw-?{-taFhvWcP&W0*Ti7}RNn6vHZ)xt)s^)zMgo`dZuJl2;?EDeEJ?iD}k4K5W- zZyvuO60NN!K7Ar2RLV3CD(nMy&6^P2N4P6&KnIcoK1ASL3%p}&@++-&PahvJ@wOv~ 
zjYA57@}88wXFN2}4qlLP}~TocF#d*^0^QdUC$6pSZ)brn)G0^uB!late*5PUpO&hT*V z)sMAtnQpdn$~!Gr&c=565sRnu9DHqDu^BRq&+YfaB8^7ByK1z%-7?Ntm$z~XUB|3Z zhneU%bl9uGylqfq0q+|h;pH$@Og7{K)}bKD5euKio${(K|B#ji0=`Ee*M=jpy9#sX@) z+@cS$&k)!2S*!6Lj+chS|O)J8mBgLtg`WpGv(h?MHy|ck>fy4;V%lgl_HcAAX3E*?r*5 z#5E86@#K9bZ~>z5&kc*__j3$bxdFrXHp=1aJ`+tO(yXS5N76KaWB-6eq zP=ZEj&Lzgjf4x>azaRI3ujx1N#Q)EDnAI4b&iP44eooLrc>xF3-NDYbB1bpLDD0+} ziuscDqhHA|avC6i_466&AJ_%PHVE4k@G@5~lc!@<$2oh1dY&dEx%7^!w$yZOV;xh^jcdUJL9pkLC2I7j4pEvf7~M znx0R8k>-X8s*Mwm-hC|M`9Z_a)qBA6TyV>^1NHo&L2)Z9EAM>bI)3pB%x-|2*xA^wS>`suf2hNA%7K8_6h~MXh}-ju+uPf(h%_BM zdL~*YM^{Id_Tv3G3Jleo5@3$XSI(B~Mn(0zSO`8_9QYN*G39~IG^xu?%11gft7 z0@&a195MieCjzNH{tLPPZY+n{J3Bhp`1Q9x))J5z$D86VJ3Jb&p=58wrqUj{fEuRJOIdkiKu94xY^ zuUF^NliB+8LV-<<9^keCY1|de^!?N9adf&VpFBwCzq^L-c=zm;z@qsTnctW(RsA5b zEZ<|W5NV_CRqlRQeW=Jr`-PtInnmjQ@4_fe`wiAB9W=nbACp-DOm-{9*qrs>F3X>9 zq=6vP*ON6?`*3js|52cNe|7Qvg?%RY8VLwm_-En$b}J9}#3UX$F!-0|`t?CB9PrG% zzy$`sq5e~l_}c+Fd?!`iwBL@(YcI;i>d~>^ZspdVpwA2mi|jK&H!DCPrZ+aRfZF+6 zy8j09STV3$;qS^X{KdchyKx{Buv=N9GAaJA&VY|%77wVZhn{Xv_uKHT?S=WSe|ts< zEeHL)3*e`w{oTiTx3_a?FSfDWXR_4mph@YwdcVTb-3;1ezH-|gtDzEa}5UZz7m-*nEpVt*E(`8&1&r9t(q)blZ;LEi(AC4nxbM>7%d;oGLj#Px;=eDw z9380IZHd#SKbJ$&ecASTW%koO3;7Ze7YkU)m=|FZ*@bSx-|rQ8Msp4mAAjdLqusB| z6&QGW@U>l)kvDd%$b4Ppg}K<>xgh6)sIxJ2;ckbg?cd>siZg8oU#IQGrBu|_-K#4l zdolN~=hx`hcogPbTxOZ1-h*qOll5P-8_JA@rXRYJTBpi0TkGbm=x8pS;k`VWhoK8M z8R-+qu8B@aNI*jsds>1AGdqlYBNgvs1q`0EENG-*jKMU7+rX=gNvM z)r^FnJA4TqKuE1M3H!(pCFm;X?WO++$bMH^61+ffC#}=_h&UynA^QIo$lt`c+X;X& zEGj!+o7#qf=Icvj*wx}uZ4-LHszd0_se)mPomaq~^cb!EjX8To0Ifa06cT4DGN-tWVN zX|TY9seP9)ZB2~hGH;)qz%3m;!+kl?J)aNQ`Cq1V@^C3zejR`d_{yI(|F^RG2Lnc6 zc}J@@o0_7iBQXgH99TCy4$<7*st}fLZGa#tc$=jEAV}b?Q}On&@4M}H(2MpC-^$dB zvFoCNz}{5<@LzKN+k;#)pyDLdvj6ynm;SMwa-g^8|MBln-Twc^>P7^-KJ=gT0VWR& z1B{zX(v1hd;qw<^_VQfN@0q%vtNTwh3fr+%;BPC>u`Xq;nILv+VrN$B&NaX|&?EEyv9NzbVwQd|!w~YQG%>T4 zV6z@0O}goS<;Iy3S{kfdXEcvwQ`^1e!mKo1VLD3FKkOL)bBF-BfrA-0=^riF+lXvN z$WOOE;VD;-yKqy_ey@g0@*?i 
zBl}E{`UKDv#*I3{|LF72rss43$^yrC$8PXeeSPi>BoK7r4-0&zxh#Q0 z??ymfQr~G>;I@Fvd#A2j5hi%_o^jvG+iz4#5aXG1&%lj`WjEe9_o#}-Af*=!EayJG zJgseC>@xA-Z0z2_=jn~faK1QGKTpCa4YluAg|@w;fZ_%BD}c*5Bvo=Hz6*odxr1)Y z!v-0KdM3o|_I8yxV7246u`oR^V)m3pcolQ){5>}a&DWZSW#j6$_BEARt`y``@q9wh zt|+#$INDdccJ;$-2XNm+>vPYoJYJ$+1? zz6y~6AME1BR>It)*`?>;eeSZo68y+23<%Q{YV3x%=ItTKuAIB4Bz}Fa-O(^&A}BfvJ_-f(R@v7sGF+=^0{`>3-Vn3 zCT^uW$z$r6SKgUtN~FSYX)P*k<_cZIIJb9gK)rFm*F*TddqMK+w&wBpxcVCIn`^bG zKk{qvt$X1L=Y9jEPDehFQCzf5u5uo6YG(X~$O@O3&$^3to(cRJx)qCP=nWfn-*9gZ zEL-SbH9vYbcDov;=gH=XHZ$gHeTyTSPq)U@-Z4o8Tu}JutXeL_9?~O%qRr)g97hfH z)=gX zo3WisTQ;I^rLkM zanK`kp)QAT{QYT~&HP~K876#0@W@2=!)@l3fjVNT7CTyKoOjJZfchpTV@bU2rUhap~y=7;d@qmMJkK?sV z+ovKk?*EIu_Y7+~+xmt_Y=Z?A#!&_k3q|QgKsuI@5{iNldXOR_O+xQ7D2O7xg=(QF zRcZ(&pdu|4=@Lp52qhSh5=tQC-C@QfmYnCqd%f3v?&m#x3dz6hz4qE`uliforgggI1Hq(T7&649e`B3{XC~QY@W#J*1YfEjP*gNUDDM?OXvPbZ)MG z#Zw@}GBD5NZ<81wa3AZ?vZdBmlR(UP;ZUPsLYxrMYRTg~2qn+vj5g0yOHWk>BWoQw zY@zwaqG?kA0Tei~czI>7>CR@O#uIIu0ak@)R`HH`t8VKwUpG$>qK5ghl3Jy3#2R_qPm;opaw6M-IHWgvjGR)wA;r^dDG$$PUWu)WT71m_(k z>m5NSw>h1Dz{L?KWa9O_wTfOu?pGI;+9M}ie$|$s_pI9G9hXP;+@Iawck1Wob!sh; zQ~fAsYu6ld=4C~&&SBEy37Lsqnm@-l^U*T9bt`*M!l$9iWp6FFxsKQO_F{^xFwIZ@ zd|2`R!QV%qk~2eBSC>^+riK+j$*}?$JC7uijYhMEKp6b3;5?hoY(Wr+<=84Lew#~# z-BIQ+WJo)|c0!otY@TxI7n#3(u$gjz$HYr@q)TqDof35CJZr^`*2E%^e~@1$4#iix z&*n#wvr^o!#=_nXjWo;J=dbzKA<-$6J#yae_vFa9+S_N|U;EjHo91@=KhX zq!D?}fTCy%M;(|WSPcZqz}|E_!Y;_G7uyJICGOFaAnN4y>3v7^A$*yZjbhE=FLVv4 zqJYM8N}9Nf9>`t=KQLrWV5Rg_wn(7y z9Bs0Zx=H5+?@WNoG(*m&=Z6hV2i_AE&C2uiUNA$`3HzYLt+d~&U2xHKLO6DR;y1+p zy0u-PKqst3-=gP4J#~f#^d%j&o$eHr9(yxbfE1dc9FtZ$qqqYWGBkYb`H!OjZsN7f z8_hugM`^$)lWqFr%4_W~0|mV>2THDf(c>>Zf;Yha6&+l#b@}cEwRafp`lWU9$>XU- zzJ2Nz@|?E5kjc(J_cmugt8d;ugVtw$D(Xq|SDKa6nBCj&QP_8cIC?z5#M`=AkS(*B zY~N}^xw0^bUXx`EmtVWVb^O+EH8jroaDRv4s{M~S439Z?ENRB}#vW+p-;G=Y`ta^Wh+_dk4@R4oTE*=K4$ZhB>YHV#0B(Uj+x zXb(6SQyau#i%HCriudjEK2U1YU|Q3zdVmk}d)}nJFW=w(<8NPNwgHH^fGpzP#^8SS z1FmKri67UHipi^#il3T#QK^slSVIibL7V!Gp%qQTOuYs_*7VE%Y%=Fg0%D&US-8Ql 
zgNge#u|hb$9k()9Gm6XE@BP9#Al!6?*B@Kt+yW+F{hmfk{^TUIqVa`F1dNZfbE;R z{0y>?%0%Q{`b%38)^s zC#xOi*>D@TRzLS`WaRdHIU{UDwiS8}`8HQ;`IvdGZ%;Li3l*N02weKP1^}WAnipqJ zI@IV<-X`Ur#tr)!J!=Zq_pyCbir*{cSM>>j?J77+{w}X*L($uvfw#$^_rUrPqPkdG za`|n<4u-q3#_&Kc(>qVuKZ;zvc4`@v|(XZ#Ht8PT9hu9-t6%xa?n91K%k}K|pykKb_sLNl&>M2Il@^ z??5j-4fzVtUibYeF`)X>1v+^Aoe|K^hP?*<_?JTX?T4d#!0%6<-;n*1?DjpMdeIyn zMvg}G@?kCl^;Pt@9y{KjFYX;BT6VJ6w7o4I_0jA!P~U={CK9>69oWCdF_{Ob?pSS4 zFCApxJ)j?6xmKLAH4|cUhy7zqR0FGgG7p(YCzc-YxHZR$7UoW{R* z@7_vxnv)XWBHk`=>eMMK2ZwxPpSidAd5o}d1E;@_G!4gla&mH}k4#T>rv-FCtP7MP zZN{76D<6a7LDA2(3&HGg6%>f0Y`Qdu=i`tuG}m&Eb05s#S9*ES8AAg~R=wZW5G6rx zV+=8D_urdI1OKd|Qf7tS(F-Y<>KWGp_^db1d%5QT44@!lHa*lMA-r(_FFS{pH9|uw zr&Tdofj_Cg=zBKW_Uv0feDH{g%T;OeK{XlvA#B~;bMhjmp~;C315MjgN{?|n;gw~ptE;IzCy;jiBiT8A z2lnB?_ZAEb5BaUGzld4A$L8;D_jXOcqfBtRx%biZ(%c@cZqmm9EW;XHclvk zjpk7PF^KOfVm8RY)O4m_Vd8fuWe(SCP0L;SK)k$++sR`MZAKQD2(XA_tnx`D*D_#F-YXVUqx5($>>xfHL6rv+6%JLj=A7f~k-t#o% z(#^stTC_}hrhL>5HWiU`GU{<7*Vu5}l5p|xfbA~ITr7TAeb=W}n?l!J(e2lWkarR*|Fe2B91zka>2rCmKPC%+<_hPa;vRZRcg+S(dZ|A;NejgJ@sf@!5E zUY3{Z{~@1Q=D_P|!G?U-l};dFG4_JfaAPkbe3gKQSb60mi5s!9@y+@D8DbRh=qZAN5K zRc9rOzQbP~WUVl%GYzj_W9>$Z95MUDbf&#DTO@od$JurIN}CG@Zx7-;>>OVn(x;sG z?9Z^B?`3(_71EPL0l%;lHn_x@hMtC{-rqEeAvwPG*rx-(ke0dJy=B)bHv+Vj)1qemk8*J2$=DE&GQJ#SvU0?9cVBC2H#CJWxd-|p;Kb6t zxc*1G3cwm+m+e8P8UZAaK%ya!;Xn;JF$Xihu14y2FjtCn<8?|KYiMk&^(SfS=-O27 zAZwa6w1kOCDfa23em7$I4Fp&Ppy9?QVDBdKE8BLS#;X*OvN?38X5+zG=)w2jVb=zRCa7#)Epr`y ztFWbwYgkh9Le$us{VY0o0)F*!nd7q^y-`Zc2iIf{RSc|Ebjl=|Ik@G^WR4euN1zf~ zl*iwrU&dIHKWZG#P&-?ISFqRYY-e1dlz?FQNzM^kz&)`WgLvy+HOi4w!}8u!F66$~ zYbOva-?JF4rQo@&dnkwh6zkJLo(#Ws!bIO|YLdcs*^npe`au^TC*F4aoJ+Oyr%aNI z&UMN}KYs-uQ%WnkPx*M?+gqBQ;n}HYR_U+YxRW$lJM+&X2Gs`)IS2YA5-d|3TeXDp za*1wbO{hPi4a&U4&`ktW_958UH28L$z z6jojx(Az#XSY+e4z1%W{FdKT4{N`OA(UDxA*&B>5T?)$2l*`mr40aqI&KoKoR$w`2 zM|fH7H|%w_`^^E_Tvz@w3(Q#C3&NhenPMBLc>>6{c!7Ok`UweKRquQQTv;GiqZzHq z#I$NQdv0j>d%sK48!MwZ#UY6@vI-o7$=9RP9JNZ@Vg~yMb;E)=yl!HiA&R3bIh``t 
z;iN}6+un!vL>o3*`R|CmR=cMikSrk(c~GlOH&}G6^aj~-iX`JCRO4S&ZlgzkXa$9tZ8az z-gH+Vt?2NkmxzBb)hp`y6yX8k9R2(T88aHrv2x}aPj-hSo!k}OIPxew3#GGncAl=kqgJ%F+<>{;&hPu~#cT6hD##-lb^4VC?m<)Gtik)qWSFHg} zJcMYu8aZp6Fwi#wz>s>XNpvhn1k0r-dyXUU>kI9FpK*U4F}cqU9zt02w^a_yPh`FGyE~ zTBfDvTxG#;yzyR(dgOx(9J?sk1FMNX+blonMajUerXE0kBoydh`QvA*Y;Rm`qN-AE z-YwR!>ZbT66ZaukD>t{Y3E3*&wWTN{7a|eU=xutO?Wam$?H|Ags&8>vMx`=(M_m|) zZ1(KuNmo?#;`<^ih$}`2dSTHJxAZd=`{=HKNKhqm{k_)O`(Roaw7{|Ynz-eFv$OLf z`x>EO+)t;gpTls&@EKnkqcyR@lCfBTTyjlBXw z!g14C1!tm*r9M%U96s_(<>8DqWkzC*%71qRVa!@H@c4atzGuFaPJVq%t>D3g#3TW# z^Hdf_%H1bpP{esiP`@jn+I1mF5TDQM2Sc@=IUxU1%8Qobwu@FMe1~7F<403w31;Tt zl<70~pgrz@@uIucL`xXO-y+!5lQec1A38c~$~5G2DY*A0E#F|$aT};u#*vii?n*yta zyH-9iY-8Ad8aT)SlotPt`(@?JvfDtbuf+39#mVWIX}$~_Lh@$xsCLRlQH!9}j4 zWlRPAKW4oKo&ly?0x7eb#u+b<1ng~~Rf4yrqR%0;=~KOV1JcRzK%)z@}~HPjHi$NH_sl zC3&yaEkF3|&cz=vO3d!l%)sYGL_RS0-U=mji+#51-ptInkp5}Ry7_E=#m!ra_CLf> zUMd2UX6@M+{XCCtfH~L?#vh>*Pv6F{21;QI71d1Xp}+jf4$S_#ZBCw|Kdj0>0v8P8 z6e#0gTaYQBD>;Atgsg1@PzZiC@Av-5USD{A?G#wn|F_e5>F00yEkP!w0uZaQaAF-C zk!Uny&{1Y*2n~BLqh01;43*4<^%WDbU$-WxJ_BZCouLkhhJG2>MWKkXH^_Wb0bDt} zP`zbQXj`HEf{L`?+D&PELI-^CaqE^5&`LAfYSA$3d8?eT$~`M6SJAygT% zDOluFqKVLGpFkW(9>4I`Jd#gFU=RLqlb~;w# zHiq56J>hil=J3~mX0T<24@pn}}W&`|l-Fqb9545iwy#V2_>>$Ec zcmA>GUb+nA2qdiUng_j-nuF(l1s4P=Eo`0om$lhkBr_asA1uw$D{DG_`yjYrcfdH^ z^BF7|*YPFdD80HOr4Pr!4rO9!HTveoTWA&F3&hu_frf93X|_1LYR>~_75k1vS?#c1S?rrmsfvK9W8$mCR;0PhW5UUh#i1@b@3UgMhsHUqI1Z+;Z2K zUBskk=Hyf;ARrJE8yg#F@^)K|BQ2B6#bxmhZesrrQ`Dp->l=2HH`hxEVg?*Ip9WFI zpl~K04_d<^?1m~z+5J{NS}lyXT3OcJ3zH8-p7^@wH=p;x8!S3HxNiI3|HUzmH3xIZ zy2a2Mf>bb!#N`odhf~s(<3JZVH6`|=DbTNCb`qtOAJ?|>Bpy@YI^9erW$y5{aWL8e z>f3C^TS^@_%RnFVGoXl_b#<58qYseU1Ox?1i`fsoDz~O2zoastw7vTBO?uv6r{Y{O zFvN~a`N_kej#m(dkN}+Y;RYC@!>-1z%^{k-+X1R>@b+oDT9rM&ve9Co5}8Qr|At6T z{^aQd$Mjx2Am1H(P(B`cp5YDehw*oBpc6|8{|NN2Jp1^3z@XO|99!R|`4g2XEOD8c znScED(gvUt{0Zcg{w=KEkCJEco)Ysfs!C@|xweTZD&o4w!Lb!^f`&cyw7D$j`(^q^ 
zCOn4Hd~;D{9N#U#m+c{P3H*NXTE82;M2JN-;Aw_-meD)LfUPef|0ZA~jVAWNcdjp0?g;TI1&{Jr;4X86-J!pY+Of*aENNfB^DSaACpW z>r*dsi%3a9^D7A1VxS;xfuiu~<(p(ejjG%nZ7t` ztIr&?rMjO(UeQ3+n zwldRE5AcS7f(x==x%Uh6{QV7C3oHbca4Ce&8-OhW^k|>MSUN8_{>~rpAO*^+Mz=k% zDd4dWIq#Uh8TnUA1Z)61Y_dk?>d7s1-8veP&)^N)VfWm=9r@Qw7Slj5fZ7wn{EdHf z%Nqh{Esj_CO8ypDKRyQZ>;?8zfo2R{d#VaBTP^en$B%~4-``MZ_LRTt(@R^4zvW`H z4>WrUy@&X9RNoTxKY|s^EI}hss#*rUS=<4&q@kb#<^Dyc({kUA|La@ zetx$6M@ZfO07*lObg{{z=B|Ka7_{;Jx5`KWX?xA?JdT8< zBz`E&*A)SDrv?spV`-qMxy|;{8d(Ef+KsB{pX@ObVg=xEq&!>v>2A3;MjB3I5Sznk z08P5PSqijx=IgkfV>Fb#S;`4Ig!UvHa;SA^HkFZJFJ>TH0*ep;?kJiLesBKP$ zEcA}Cg8>0GJG*GesPoAaa#=J3X*66BU9~|f5El08c$l4YA}%hjdFK)1D`@at{Jnek zh7|ev`ESwJ_MX<(!nQfliX<1>WDC?}a$z;-pohw5_syQ!Q4}^5BZzEm+RODY5zq#OnXhzB<$!lD^>rZrgpO&nOo63(-}qs zXeV4n=@P}ru{mP(PG&sYXxM0F8Xj1CpDx0b0Pwv8x zMQaIxmYkJ2{G6bWSU470Lm`63#hwspVoBFh!p+7b1{2nT!VmMt)T4bTj8lpRPijUz zZ{mh&rXH|j)^x@sVMdy7wm&SsK4ji6A_nV~FgRf{JUumqDC#s*_Vcu?8kQoP-`Nq(UELfBnGv`UONn!s%&4oNUafwhm@}-A6YJlrm>N5czE& zB_Z|n-iNIm7%|l-i+)=9WoP}_(r$dCsH=mJ31*m6YuZ<<1?Er5C)e1{xOe$>+bPcV z3gZ(}+Za_skT*1sCEEMWEpckyH+jUUA*m?@Q6$&L$r9(9>_S+s>$X%?%zlxPKS4?=D`^(RT@%foQwt-W z+Iz|>|CI;C-|N<`3KFYbzT!ZuT0%m~O_D)k-b9H#hzsXp%N=H>(Y+?iAK@1YGQ1fc zV)j~P>*87k`Fqxl!d_?`(<-JScB@N|$9K99yg#a1FLDa#=zjXB)-4h&&~5SOX?LS*-tMC65$?or<%QSYs2%j zKDS=21Z?9k8vr%fRzVh3VaBoUUzPZ&Oz%-^(A(_c`6rIIfQ6sC>zz)X<~ouG89((_ zP%wU&$qe~uhA)c~m1B3*-r;tiJ@v&46D!m!ueJ};X;VdKJ3Saqa-S!V_?vpET69!Eh%fn z3vUe*nweWY?|*yZQ(N!KOO&P8hyZW%BfG)w712^cdze7%DynWE?sB$dQ$vhJgLZUu z=FkAiWUo{S0fl1G$_b(No|AAsrIDI|fO8${v}CP-VkpJq8L@ARY>QAEHklTk=zb)}x~jFqZ%??{w%7Jzb5V|dyXn{@ZpCaN+Gh&kDd@B)9hz~fKX zo{P2N@=NbG3x}J6*Qub*xFcy9i2LHc14AE!IZZ*j%vd0B!N?liuN97XMwh~#qrtgd`tpn^XE1W6}EzLh?drNv8qzX;(ro8JOuS5?C zxBFy*Mw)0uwZn(!&%y+<()_$!Q}^7&_i z@*UmDMI2UdBw9Ii#DySLR@y579oYU;b5ip8gy_K=hES!!+p5zot^GHce=#z|A7zt$ z%8o8Y@QYM%+=gn}2^}F~_0%r%rf0UFrPZureSCWAb$LZxy|b2IZtTy?xhbwW^wQNE zQErJgmhDon8qyCNZGO>^AL6W6qf91G*<#`|nfNmh`8uv*p9&i|j5I4rHa$)7)tOG7 
zI3GvCW(w5VLdy02@z>hZd+uWlUoH8!esUVWc%Z*5PbQP6e{r?lA07GEHT=1;h^Xkd z6Tj;uP^5`3gR_J^9t= zxp4m~GMun4BI-FPc8>BXZ}*)k?km2KB8YW7QL%&(7p)$wxL`({&659Fycg6bQqCde z-%WkuI=tEV+VWvL4cNI?&6OlS7osKLUeT4(ygHEv8(yiG(t|3G^+G&9Z9CvmYg%)+ z!msA%L0w?>*1p=i&xPS6tj$GbdOih!KUeyUi|0W)2jXbT1AfN6#@lmaAq}SRIp9DH z4Gp1(7;n0e*cWpuT&{>1yYaKZZn`LB1V!F4-sfVb0i+YVxS$utAkP=@3Mehx8R>r$ zjpwJ`MafX@e5HF}EeG#P$0P9}ZtuSJ_$xR#gY8$_{W~wwKY&BiKaEn;JQ)vm+&xbJ z?!N(t_~}lN>v$bKcgXR70EY~JjC%fa%kga|{v9MuXlQsii0$fqi-@>|7W@7Vz1`mX z{tG(q4`X>eO7mnmY5&j60Ql*XyZ3zcR)1@6Ze#pa^t;viswcY`1Dj$ro&MxilMQ_gm z4K7Xw(V8pG+|Xr(xejRTW04)b;QX!oqrj~4C&M?>Vu0k$`j%g>2!^e{%(~za`b&TW zEzu@3r>~7(;EfiAPjo`+e$@}Y;oFy)AS%}DLc;!l9Quzg_AV4C1mZJg^y)%;OTd5= z^G$zBZ`v~w4}m)T`XN0!YE*6F@A@^F%Wbp?0GQyoIEaCv=^1|hYW@#Fr0M3yiedZq zII^-C>agR(>M&%7UX^s~6aT-IRKMz4v_nN{J5ZVSKOO+<^+r%c0;!!pP=;#W>x*7$Y#kD6D+ zN3ddjA@b4%M6$ifu8ud{b8f-Wk}=|jibhgRN{bQ0ncs~a_v^U>Wq*g_L1OD12gK?I z+THp|fn>GcEtG(O1d`H&pQFy~0rpV5Df0co*u`on)VSsvkF{6Q%5x;~`y#h@AXa*5 z!Syk%)A1ytwys5ue_Ka!YyXuu3@mv$$HtGa3Ege|ni2B1Ms^Vtl!=z;IyTT*D5Yh< zNfp+%t>5s^S2CI11e_F9Mj)P}qt{NRf>ABo765VOI&{7V(%X+`y*nh8+2Z8cP^P4%tVf08n z1CkzG+Ith}@CqOSNLjG&8E~Q_cHYT@q==b*LvcF1a?}v~{v^*!mL8x~1ufbyZ)8i) zDgR$?2HgCAVuHdOggt8doIdt9jUQ_(zVnpo~L_Yfw4 ztQ;aX(@I0b&W+qJau}+RFwS$MSEhWH|7E7*tFz+bI<)GYO{`5?pm9QB8HG1!_tFCa zQhU9jUV4J=k0Ugb3RYaXLIb!prj-airmlQ011Nr;0S@cFPkXF!Wlc8ZNKCF$~3&M8BIfAMKMPf3BZ;i0!JhZG< zy*Wu_Rbh*IYdmTygXh838_>&!$Jj7&ksX)IaKuNM`4d@3*lcmXO9{ljFzJ#i6U0ep zx_PH9muzHYWbn;6tEUXddKkU$!g$TU@6NB=2uiif33=3_*)H+BV?PdIQv9gFsQD_V zP8QD*N4d;0EQKfT1t>E!H@9F(nrV{E;>p5{s5|?Xv+H!ZH*1k^Ydq?>Y}BwAbKT9x z0+D))^wE&$$AVF}t%|T9RJ<+qWcrxjWYRPD?1ku?*DdD#5`{T-367*bHo43jxltQ$ zgY@L3SRJS1l?_MZ^w^M|_L{63pt5qDO*T^~-^hW45Yx1m9PTZE!QQS-k{{)fO>ZI2 ziXSOESCM~s*2Y#!qNB>qH)$_qudc6D4?2&{#O9H~@7W$k`Zr@Uo`@1armvc>I_U0h z^LF4^eRE#lqPc;?el1>}MCsIJbE>30_S$@x7N(+iY_{1Jwuas8t(ex_6Imojw9(2@ z&@b>FnpWen5bN*-k^G9PF>c{AT3(&e*|o6hC$ti8Jjbkua+4Vrj@gl@CNNl}?ftz@ zx~U*l|E3c48eC((0s`Rq$J8WF6nR~eDsM_jRmmT|^T5MIonENj@=<9jVTE3#{<9^4} 
zwDMnCC<{YNJOFvEZogbIrDq-e%Kp4I5zTP&=tPRUsV8WQs-Iu8L8fiWSyC@#_nrw7 zT`h8WXD7uqxpH^ibM z8|kIOo|hjim&!dCE0ae}dv6yGp19uTY(O9vY;-MYaYPem3(_gJY zdT|ijd*%`*ia1H+>1C@SQq*#3O{U-A?95&+A?@%v7>A>)cd^XxI_{n@LY~8PQJ7pA zlG`bPWc324QzJPGG5&OBmxGWgVPg86adnss(==(q6Z3(w+x{xY#s0vGAr+tTSs9^$ zfjBQMo!O8|0a68XvBA8t_h9#2xr}Dcku~eqz1{YgIaorXW%RJ_XoZ=c0!M!#|Gqf{ ziM=I?G|oGh!imBD_Ix0Z{aNRDNuTT5TwYj1i5Ag*kZ=y;JjFI%O^WKXtTs^I&@}F$ zG%WRvtp+~o^OE=DOi(guc!AVNlJsp)nkE}_u;CV7Q#_7OtF5PR6kDdas()%2aGZ~d zkrj2xN&9a&~-yc@1fqpZZwrteq1za6i<-MqLh02 zj}(Wsl2KL2B&R51dUF3vs)E0(^!hQofWYZ{`eGlvqE2Pr$ST>J2|BVU%W?Hy* z#-l+t4s7KtTH8_GH_O7*d0~7SbrnSEg?87R4GQM;^H>ewTnT`WBdh_KZP_*M6p({K zB^OW)6&4WfF|?kmhV9#reZB)vTN}(`hc}++2`C=U0IezKJL3VyB4~?>&+;c{EwA1i z#evXZ^E8Y~76V7Nz3l!?kZmGvO4B{NEDdfzhJ2u5RIE-aD;oMDF(xzc_@UjeyGJ?K zUvb9l1_3}kE%Lp(>`z^e>PvO1XT>-8uQ%jW)|gLBfr-+h>As1d8R zyA`g0oTzO5=dgGjj!MD7K=p30uIS*#3Jv(2rYYQ#RX*lv>G?U3)7Zg#U_fto3;Q`L zcn6XONWmY1lZ*ElDzi5_JPXRvdL~9eOJqG%gZ#`@^z0hCz>Ojfe?G0LQJJaF<3&9-y0 zifr~prY<>B^*Y;z0IIzZtxq%NdN7AxX+XTGmkntU3aX&_^4n#mbKwB9Lr>Io$h9w) z)&sXk>lns*tQU!Pjon9sWL^b0?D}W`H!RG~g6=fZG_lP8hZ2U=mZNlVZE z)(G!(qc!h+WcMV8=@(ZvG-f!q;Ze$+J_Oab%@fI7v#R~twe9i%~&WDC>S z!XNKD>kU(7tMIzOHJ7B{dPln9Rb7ierO#j2DbY1OJl;d?(?*vfMTaEs5Dv=rhcsPZ zzD_@tp#dXwrERzzW|=Cs`t&(6+$a6Pi;DftL1OhDG~5yN=X=7}d<*J~*A{GqyW%00 z&LR*jCT5g&rX3#LUCcB)culj)%E+3?BEaOZj7vq_|Lki{MCHWeL zL(dRGMnf&Sg1CS0>VwFx&{`f7s6C>HLZuOjxrEuQ7V^ftFcrQmDl@HCb;Bp3?42mw z{jduqd=cN(-d0GI@*9;xPrQ#syVc*Ax;XTa$^G2qe7a(cg#}q>>WEd5glMz}Mo!gx zpj44fHH}DXbL{yp@Ypp{&eBpUn?w1NbqbE^E8*8#rCb)RaJ`;InrC9xLBL4^Cpnr* zOB)z^U-z!S85Dv4QE&yOzrJ_u&-%K5TlZtGIjA=tglG<kQwH00Q(n2e7yCu!%?h>;U6y#?7T2@yTG!)^UPp?a#FMzou8xrANYz&^JYqHp zxHDQZzfApl%-Z86$ciBLrHK)#_W=`rBK#Be#3) zVpR3890=*)Ra|YtU*sfTk%wEpY<&hgBIT4wlm{|X08p^TP%K1Wc-4nXI=)?9 zChN?zjAe!w!9be%3((ZHY)QfBe9v4{F%{Kd84hwz0lI?FO&y-4m^NKT!vJ7X%?v;;T#M zXGCMed`8Z>nl$M5#$QtM)9Z7M(q;X{*tu1LH11Aqvs7bVwlQMiUu~LZ>5>krnfz0j zk2*`G)DXlY{Ay{l%x7&S*(Nz;F2$3^u!2lo91>#(2wgnKQBa+5Vl1Epuln1&e8L-aCb`$M@c782#+#MXgq^`U9J=mv$4*t~!n 
z%K{sSwC7qPh9-&KxPZk-DDsp64EuC(wDSn{Va>`zg5~47cIVWE9EDIBF_Ix3o zRLCc*&}%ugW^5iP~awTMTmZYbYVnJ+m*C%P4c98d2*Q4_?TcGijP~%x4=^X`dg*($FzW zxD?9))q`>)hr-|cVj{B?NH2O=>sZfOUXaAhHE_Z-o_AZrG9xjF7=(&ukR*!*;yGly z2RP6Qn%l;GX<$q_Az{gHlZr>+<4S*ExemgWv?fG$_gQ*43MHj&cWArWj5uTC6756* zLXJWZASX8=$t~4(xXw@|8DG>Mo~AtG>(Nbvb#23Kzo`y!qas2^lcnR&HNBEdDPUJ0 zz{MB4(v}4I;9k-|<-QA@*D=%ni^~2bhK~6~jY?e5SxW|P>nH2rxOH=tg!AZhPBr(V zS?`bS`>RR{s&1)0Bjvv!mBUn<=ir*dH{8jnGLkNT#>v@g-e+Q`7$Wk#Sri0uR5RMllJN)&&n4c#Psa8M zj0rxH^Q#p=dGLSy=Nwt#U?Z!WbX-PC)1}KJCq)z7xFK!7XtK{tsh8C`@2a!G!7tmG zpMvaqHx+30O9ff&C1K|Vt5?8cMyGOi1CvA}J|1RDL5RPLju`5SeKs}0YYTKG}A6ZVZT8x*+`&}j5+9=OgECz_VsU$@?+0_T5 zv}Jf}@O$D*WLf)e{3XTfG3Jg<=i-^)SZ~t}${EP3v+ zW`pI%PV~%4J(hlQ+*wlkj5r>g`zOz>Ma^f~o$+7dPkU6cUt=|DrEzrX$PL*u7fsW| ztOm|FR}b0o$EUF4D6&l-oH&bA-z^g{-b9;=4v>;$e*+=N)K2 zch>3MvEY#CI{W6h6=3?=TXAX!C7Z>cdojx2Zm1nMjKG)%mXkhN87bm=ObTEn9e#10+yDd}gYE$ir-9+~L&M(`EcFbuZ z4TJM%pjyd3^G!Xsx>Q@Y$*e`TD=~dW2xy|jL#n+xy<82}YrGjk3S2_rkaR93=Q78r zi))6*uoZvtMZc9c5`&DKNU0p`&GUViIMKO!E4*|)rGfb&5i)Yp`eBw}i_E;w2p>m9 z6>p(y<-kzBmyrEUhpE^lF{{SMlGg9IMI$HXqZ2R_CqTAxZJo1ek}4ra!l|NS9}z$2 z?!GfhNd>MDub8!v!rj!T4gjmLIev1Yh_cf}bsqI7mmwVYT-^B^yS95a%(S4{|@>#k#K<>KMI{YmE_=8AzqCBY#bRlInueObg-F7uJx)U$(KOLDz923NMY zqiRMm@|e$ALn{uEEsp!u%3NN@rD==^x$|p3+D5$ zmYruQ&()vzI7RU@)U4x$p$*6$#)VL2X4|z}ySpUEc^jG+UvS}M9H-Lob&VzGr&z4) zBBMo^cN;LP88;-Kp7UR-`DIN} z=UP-9SubXC#+3;`&s%q%gAC)26?^)Qh&9ZFS}hopqr&~yM{@?$GuBaX#qf-p_&LWV z-VkK2;^-8YPjOhNRrKQ~bb1Rb3w}qSTs)~>m{%x$(8k(%JBE60|t76j=76_DD8#+ljj-U4n)oJr&$V zn;6wX?#ZP^^R&pj@6E_^=MfKoOcg}e>7{r4l zHFN#Rl#e=K%Y#}f;nEaCMHW|g^<#FJI6J88A^q73re+<}^GaG2F?Nx{+K5A_v>cZnXp3sUIHIr|q*0X_ofuM`yc#8* z=Dh2sP;2SgQxQuip-0o-#E`YvGq%Sx4XxKWywQsnvM#VPmt@JGTIBs0>(QiCM2V9D zvw}^%L-InM+t>H~0oS_%=7De%gpBZPPpb1X=iz8BiC#6)BI6e#&R^Qa5VFd}Ta+#j z(UuB~QJ={$#@$Nkh;M-Ncz|sm7x(#u&Za)3joE+$qUT3S!=H!K_rt;bMY#+cZ#apC>QISQy5Y zTvz2QNxO;|HOpsEr;4)Nwc&TZa4oDS`;4nk@c#wh=j6z!AfCQ zQDnZk7z+~~*z~B=h=8b$Fl!Fa4KdYt1wZorse!Bmum)?gDIx}wZW!i{xT|5Dfr~3t zFNkERQYe?2!xg2{*t3MHwjl^x 
zM^f8n*EMw~SVy@f#o}(l;O3Wy`n0MneG)ERBu|DsNlspxuGyIGd4FQMO?R)&gcvev z&bD3vXbHKIM%Q`Dn5|jUZkTeb{xx{(j8o!j92a-daOn2VrHmnrs$&R=i&bovef-AfR0{w<`H>!mgfC1FfBMMBp=$ksLkVmvLl7YlVB<|hVJIECk8JS)iF z`y1a)sH*W+BQ&WpGR)nM_iQ65}A0(EyX(|i%&#|{Szihr_Se@?{#Kl;Y%6`;Yq&35wRej8N>f8+@>Cait z2|)tZ`Ibe(iZ+)P(DH))dToA9T<|~6L;Qd3X5nC!%S1i4m_jc5r6-Bf5YVR|K8ZL(*;d+YCL0N z9Z><7&3TDtBO=6W9Kj(4>nUzxL>u!$d8hb^h4;f@Z`XAE3T*U_lMwo-Ef2+btP{<^&JvVVK%yxk|;{TF2~}wtSD(1h5wHAkxq#JnNmkR;sz8k;I#e4`kku3Sr$n~XWBdzyPD+2!)wUJ6y?!ACmNn5 zuQr%|pc~z7fi8jB48WgWb?8b=fEbU}Jz+O;HrK%x)J;YkjowlJW#UrAB9a+u)75F+ zVlV;R8)B4(mWFOh-cmxVB^-;g6CTBv1amq*GJ&6jU^}>4JC`9fVxxDfqzVy?V+#ZaXIOBa8h#TkOXN$hjDK1HpjRyo2fvfViSXVa9JXAzBBCh2Y3SvKL= z8gL%=Sr`;J|Y0!eWNPM!p2<2-Z)A#qqf?d7q|t zf|FW(x2F*All5*b%C%U1hK^W_2xl8?#CS|8A7a zd-y^1Qwc$5Lhi6z!VjvQ(mlW@-VvF5lV`ol6Jx3JM}>g&fcSn>8DE47W^^@$dw0Kj zv!%H(3mWlDlb3V-d9gWzqDL{Okk`}#5kUnCjrb4Hv{$Cp<;1iY+5K)ZAvvBn?;>S$@&RIh1?jv+^_`pb+ram5dS9cl?T&+(Oxn7z}u z#{3ZRyw!jQMNyp@N^J3Ijw@HqI=lNrowZC#$V5db3hEujB<*1xG54+)aXf^Y{3@!v zRf&t0n#>&=c$;ZZ`WI_>M<-i=OK)dpx?j|9%~51^6P}O?G6_-qKWPWu4m6YHbNsFEAZ8CY*-mp8R*-L{$i<&io#%sKOLJu#I%G`Yp>5tQ zQU=VytFm*5GEvb%1=WNJ@y9kbo9?TYv<5MHXIY)$|N$vtO6m=4KnNxHOM>`f%#6Bka`H&rnqZOe};x`)Crqjqr z6VA1WO3XzmT#tH&lF;al=bnq2PoGUg>uQ!`1aA`x9m3kBpYZYS7%Sw`5*u%cavgU| zWVIg8PD)mZozt2Th_vvGi&3=b$dwgMp8In|iisqtJ>F70Ca#S$_hmD=yzpgCLvssAXGk$O}7ud)SDSXsmt# zE`FX@lOQ^?5i?{si2Z*7j&}3Opv@`QAvVm<^@I3!#tetJM-Z4`Of#ve%ep_TWjyV_I_CF z_g~VZd-9(lx8IDcJvvbGy3SeH^(H*+NP5=3h3^D@Br+~CNPXAhE5rWrSpxj7x<&Zg z@RPPKg=REkr=#z?z52rv_Vrm+3!nv^Q)3(NBRyKbeCJ>8mO5~Q-0Wv;f~NP)&ND7w zy(|BcZ;R*rQsQrE-^8zbyOQK3?S{LPzr1dVWq78D4T|Ob^xsMj=Vc35RT-4n(z8$0 zvf?X4H%SP*?GLSJ>n1jUw_R*k+p>jZ>;(5QxAB$ryfeQ-Twci8^wjPLpByZskDFVt zz|tBQk@KEwoCKV+9(b2gWwZb=@Ong%qfeiW2{C58>f}=VjZrBZqwq8T9E*aGfn;xCglSVWZOvcdApL zNc1$KJg#@ERoWpa@%URX`e;Il`Vm>m*U{6aWmwdPVuvJL^s8GleobwwbX=67PYf8^ z=N1h7ei-v|uU>zO&487aVnw-xfbzJvgK+lr#r|?mvU|<_Wh&V;6!6(bjh2F_?fcN6 zC#jRrR8FOFB2ZZIu{oG2F=KSk^<=4dMzN%8C%6Qa(&8|$i5}@a>JM=V0qMQ4s`)2@ 
z(MRb+THF}!RA1au>B$fIu5%t%u`3te=){3)kB?PHqhK|V{r(BZLl>s5tn+1>lScVY z{YhnkEW+})R6!$peN$p}Yn1paH8zVWqQtnO5|vLpP?zC1J6b^P{bSvq2lvre{HFwQ zQI>}Le;VstJ`vhSI>ViFeU)wm_w;T|s+fkjQ)pLW`pnUQBnG6?K9l{FS^Ayh+KG&9 za$i~rEvKE1#4Y}!Y*LIn`KL8>O8W{7P&=6~PvK?}l2h{zV?s)avat%jrDs6}J*%AYUyVg%aF9E7WBA6!iNf>ul7U zcO`G31bG0P?mBnIy>75+>~j%ip(u7a;q6>aD8hn%tr_;Xr3RYL339xSZBr#tW~S^J zkg1@wN=2pJ>blQ?6rZZlhoI@h;qeuW1+J8^X?1BYNok$md|wJH^%Z`6lfi z8gbf28qQr43|NITbP{`cGqt=`^T5m$Ip{nj24R{ zsP+9nPE!41K;LY&e1FH9Vg8U@1#a4aSY!REXvBDiyxE!B){frK?H8XRi;%=vND z63VB5aN1C>QuU~W0D}$?7AX1_`ouWF=u(yj7VyyeqGVYWK)=V~?8`YlOWF}%d`y4Y z3-+G^!XLdSwUVhOB!}Dujm-dFKdsTBm49uNV*M>b&9@YS8xAMqWO9w$nVk}msFt(I za4Oi~ExGspRep@dO@-Y1DaP?SjA`eW0=gl2HK_$<`6Tjz*ZQu~QZC8hpLl~G2LITt z)leaed!2=HmM@ku&sS{eRSfjG(9xmF7&E@5?0_uTRQ3*u*8of2elnE;rSx3b z&uSrBU1S+zQQK@Yixr|)HZYxkngc;a$wJ(J>(DWGBYFA7vhDM1(ZUejCwa9~$LE@( zQl;A5?YHl*18o(AA-1=!^HBj-l8g#{bk0GEJ^k`T^Sse9)}GVgR`a5}Oy zi#C`E!|x4KL6*8M&439pzr9v{MYw1&(_AEfF$vvaw zp+fHsko?x4>ZcMT98>Oqfy+koQAegsKww@VGmUBKNxp{^)$Ee_-A2Vwr_mREU~oo4 z_jJ)L%Ghmau{fkS+bt(x9b;$3(l;;C0o1!Xbp>2|a3)iJRa~uL_!S5Wza>H3sEN_`$Ob%IOE!*u$FQz1+Hw1G2wa>U8WciQM$ zhqU}^tS>pdx+Nq%B%gwcTewLv;E_IzEW8JGq<2-<*02h26D-$(Lx;+2dvNl}5f??A z8AjA%4rR4fJ+MQZB)COlgmu=g)sv=s1P0A^lF^r$x|X!`yFpZ;TYPHO1A}O_+0HNX z-k9}Ep?ISmQfq&KO=Zn+TYH@_>h8dgsndOrg9r{(Y$g9P2MkoYJI^Ls1x+;ErkI zr_Ysp#Lp3r=GO8ralBZC;zcGegMy!ipeCt;g%O7vv@-(s%2nm(!=UH#E6uWthb$!0 z06;;1L8O_Ta=vWHNhW0`WWJVUDZUsE644{eZt!P2bwJPKpLXoS1)QBRJZ3~iJIgMk zVDRf5TN>-;2-p-j@BYDKZ1ZP6PJ&P7!_q}uhhiDXv_lvtDCr*JA-!h%B3Nq=AHkBk z;@Fa*_|=9TUi?4^G@42DFWs{9vV8C zw65qG1KZ@|az3TI-~kdb96W3-5R`g4oG*{_z{Faao&V{xV0Ceb@Nt>4%YaG-Z?4&K zDAN1aKtu4a^Nj@cy1X_+b#hkM*Zh)*yu@9(CqIO1NBrucA%muWDVNPCPP8#xO=U&< zEQH+L?sA8EqZPne`5@eQ_JXPbO4)kZ)oKt8<)XROgF8>`B(G)@^rZRgTF30#?VLMD z0G2VJq6{A8r4_C`xRERv)EY`8yt4XJsyOVRcPf$Xt(3BMA7^ZStf&&_HQDv# zzESnMU`&TZ9fK4K5j}*KnoddPoY>QI3K~Yok6A(wy$gKI7RHDt3)pSqRls_VJL^D3 zo%<2oq_HKVXb4f6_avtsU9bz8+6W3=4=c}{JhxWKa@378sMmk#hx=dfips*#3m;a| 
z%S%o<6M<`16Eak6!B8mHcr84mU&V*i)MGBMRjd3b>cc1y;@9F!Fsk*XNZ?wi5K_e| z)yW76qBRV>SgKmz0di(7I%G(Nry!HYOV>h`0#$TL9k>79T3h(H`~Tb1|MvC8EzO~_ zr`l_wVjKH6pTI6JOcxz={8PC8KeWJKCbd+RSQ_f@TkwAy1mAk>XPD?wZkTdSeUr_} cx$UcJmei^zO9S73R#Of4)92l2CoiV`8@i(pF8}}l literal 0 HcmV?d00001 diff --git a/chat_template.jinja b/chat_template.jinja new file mode 100644 index 0000000..f641c60 --- /dev/null +++ b/chat_template.jinja @@ -0,0 +1,15 @@ +{%- if tools %}{{- '<|begin_of_text|><|start_header_id|>system<|end_header_id|> + +' -}}{%- if messages[0].role == 'system' -%}{{- messages[0].content + ' + +' -}}{%- endif -%}{{- '[' -}}{%- for tool in tools -%}{{- (tool.function if tool.function is defined else tool) | tojson -}}{{- ', ' if not loop.last else '' -}}{%- endfor -%}{{- ']' -}}{{- '<|eot_id|>' -}}{%- else %}{%- if messages[0].role == 'system' %}{{- '<|begin_of_text|><|start_header_id|>system<|end_header_id|> + +' + messages[0].content + '<|eot_id|>' -}}{%- endif %}{%- endif %}{%- for message in messages -%}{%- if (message.role == 'user') -%}{{- '<|start_header_id|>user<|end_header_id|> + +' + message.content + '<|eot_id|>' -}}{%- elif message.role == 'assistant' -%}{%- set content = message.content -%}{{- '<|start_header_id|>assistant<|end_header_id|> + +' + content -}}{%- if message.tool_calls -%}{{- '[' -}}{%- for tool_call in message.tool_calls -%}{%- if tool_call.function -%}{%- set tool_call = tool_call.function -%}{%- endif -%}{{- '{"name": "' }}{{- tool_call.name }}{{- '", "arguments": ' -}}{%- if tool_call.arguments is string -%}{{- tool_call.arguments -}}{%- else -%}{{- tool_call.arguments | tojson -}}{%- endif -%}{{- ', ' if not loop.last else '' -}}{%- endfor -%}{{- ']' -}}{%- endif %}{{- '<|eot_id|>' -}}{%- elif message.role == 'tool' -%}{%- if loop.first or (messages[loop.index0 - 1].role != 'tool') -%}{{- '<|start_header_id|>user<|end_header_id|> + +' }}{{- '[' -}}{%- endif -%}{{- message.content -}}{{- ', ' if not loop.last and (messages[loop.index0 + 1].role == 'tool') 
else '' -}}{%- if loop.last or (messages[loop.index0 + 1].role != 'tool') -%}{{- ']' -}}{{- '<|eot_id|>' -}}{%- endif %}{%- endif %}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|> + +' -}}{%- endif %} \ No newline at end of file diff --git a/config.json b/config.json new file mode 100644 index 0000000..53d6654 --- /dev/null +++ b/config.json @@ -0,0 +1,37 @@ +{ + "architectures": [ + "LlamaForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "bos_token_id": 128000, + "eos_token_id": 128009, + "head_dim": 128, + "hidden_act": "silu", + "hidden_size": 3072, + "initializer_range": 0.02, + "intermediate_size": 9216, + "max_position_embeddings": 131072, + "mlp_bias": false, + "model_type": "llama", + "num_attention_heads": 32, + "num_hidden_layers": 32, + "num_key_value_heads": 8, + "pad_token_id": 128004, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": { + "factor": 4.0, + "high_freq_factor": 4.0, + "low_freq_factor": 1.0, + "original_max_position_embeddings": 8192, + "rope_type": "llama3" + }, + "rope_theta": 3565775107.2609234, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.52.3", + "unsloth_fixed": true, + "use_cache": true, + "vocab_size": 128256 +} diff --git a/configuration.json b/configuration.json new file mode 100644 index 0000000..bbeeda1 --- /dev/null +++ b/configuration.json @@ -0,0 +1 @@ +{"framework": "pytorch", "task": "text-generation", "allow_remote": true} \ No newline at end of file diff --git a/generation_config.json b/generation_config.json new file mode 100644 index 0000000..5b92e42 --- /dev/null +++ b/generation_config.json @@ -0,0 +1,12 @@ +{ + "_from_model_config": true, + "bos_token_id": 128000, + "eos_token_id": [ + 128001, + 128008, + 128009 + ], + "max_length": 131072, + "pad_token_id": 128004, + "transformers_version": "4.52.3" +} diff --git a/llama_nemotron_nano_generic_tool_calling.jinja 
b/llama_nemotron_nano_generic_tool_calling.jinja new file mode 100644 index 0000000..9810c15 --- /dev/null +++ b/llama_nemotron_nano_generic_tool_calling.jinja @@ -0,0 +1,51 @@ +{%- if tools %} + {{- '<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n' -}} + {%- if messages[0].role == 'system' -%} + {{- messages[0].content + '\n\n' -}} + {%- endif -%} + {{- '[' -}} + {%- for tool in tools -%} + {{- (tool.function if tool.function is defined else tool) | tojson -}}{{- ', ' if not loop.last else '' -}} + {%- endfor -%} + {{- ']' -}}{{- '<|eot_id|>' -}} +{%- else %} + {%- if messages[0].role == 'system' %} + {{- '<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n' + messages[0].content + '<|eot_id|>' -}} + {%- endif %} +{%- endif %} +{%- for message in messages -%} + {%- if (message.role == 'user') -%} + {{- '<|start_header_id|>user<|end_header_id|>\n\n' + message.content + '<|eot_id|>' -}} + {%- elif message.role == 'assistant' -%} + {%- set content = message.content -%} + {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' + content -}} + {%- if message.tool_calls -%} + {{- '[' -}} + {%- for tool_call in message.tool_calls -%} + {%- if tool_call.function -%} + {%- set tool_call = tool_call.function -%} + {%- endif -%} + {{- '{"name": "' }}{{- tool_call.name }}{{- '", "arguments": ' -}} + {%- if tool_call.arguments is string -%} + {{- tool_call.arguments -}} + {%- else -%} + {{- tool_call.arguments | tojson -}} + {%- endif -%} + {{- ', ' if not loop.last else '' -}} + {%- endfor -%} + {{- ']' -}} + {%- endif %} + {{- '<|eot_id|>' -}} + {%- elif message.role == 'tool' -%} + {%- if loop.first or (messages[loop.index0 - 1].role != 'tool') -%} + {{- '<|start_header_id|>user<|end_header_id|>\n\n' }}{{- '[' -}} + {%- endif -%} + {{- message.content -}}{{- ', ' if not loop.last and (messages[loop.index0 + 1].role == 'tool') else '' -}} + {%- if loop.last or (messages[loop.index0 + 1].role != 'tool') -%} + {{- ']' -}}{{- '<|eot_id|>' -}} + {%- 
# SPDX-License-Identifier: Apache-2.0
"""Non-streaming tool-call parser for Llama Nemotron models (vLLM plugin).

The model emits tool calls as a JSON array wrapped in <TOOLCALL>...</TOOLCALL>
delimiters; this parser extracts and validates them.
"""

import json
import re
from collections.abc import Sequence
from typing import Union

import partial_json_parser
from partial_json_parser.core.options import Allow

from vllm.entrypoints.openai.protocol import (
    ChatCompletionRequest,
    DeltaFunctionCall,
    DeltaMessage,
    DeltaToolCall,
    ExtractedToolCallInformation,
    FunctionCall,
    ToolCall,
)
from vllm.entrypoints.openai.tool_parsers.abstract_tool_parser import (
    ToolParser,
    ToolParserManager,
)
from vllm.logger import init_logger
from vllm.transformers_utils.tokenizer import AnyTokenizer
from vllm.utils import random_uuid

logger = init_logger(__name__)


@ToolParserManager.register_module("llama_nemotron_json")
class LlamaNemotronJSONToolParser(ToolParser):
    """Parses ``<TOOLCALL>[{"name": ..., "arguments": ...}]</TOOLCALL>``
    blocks from a completed model response.

    Only whole-response extraction is implemented; streaming extraction
    raises :class:`NotImplementedError`.
    """

    def __init__(self, tokenizer: AnyTokenizer):
        super().__init__(tokenizer)

        # Streaming bookkeeping required by the ToolParser interface.
        # Unused here because streaming extraction is not implemented.
        self.current_tool_name_sent: bool = False
        self.prev_tool_call_arr: list[dict] = []
        self.current_tool_id: int = -1
        self.streamed_args_for_tool: list[str] = []

        # FIX(review): these were empty strings and the regex was r"(.*?)" —
        # the <TOOLCALL> delimiters appear to have been stripped by an
        # HTML-sanitizing copy step (upstream NVIDIA parser uses them).
        # With "" as the start token, `"" not in model_output` is always
        # False, so the no-tool-call fast path was unreachable, and the
        # non-greedy regex matched the empty string.
        self.tool_call_start_token: str = "<TOOLCALL>"
        self.tool_call_end_token: str = "</TOOLCALL>"

        self.tool_call_regex = re.compile(
            r"<TOOLCALL>(.*?)</TOOLCALL>", re.DOTALL)

    def extract_tool_calls(
        self,
        model_output: str,
        request: ChatCompletionRequest,
    ) -> ExtractedToolCallInformation:
        """Extract tool calls from a complete (non-streamed) response.

        Returns an ``ExtractedToolCallInformation`` with ``tools_called=True``
        and the parsed calls when a ``<TOOLCALL>`` block is present and
        parseable; otherwise returns the raw output as plain content.
        """
        # Fast path: no tool-call delimiter anywhere in the output.
        if self.tool_call_start_token not in model_output:
            return ExtractedToolCallInformation(
                tools_called=False,
                tool_calls=[],
                content=model_output,
            )

        try:
            str_tool_calls = self.tool_call_regex.findall(
                model_output)[0].strip()
            # Normalize a bare object (or truncated array) into a JSON array.
            if not str_tool_calls.startswith("["):
                str_tool_calls = "[" + str_tool_calls
            if not str_tool_calls.endswith("]"):
                # FIX: the original prepended "]" ("]" + str_tool_calls),
                # which corrupts the JSON; the bracket must be appended.
                str_tool_calls = str_tool_calls + "]"
            json_tool_calls = json.loads(str_tool_calls)

            tool_calls = []
            for tool_call in json_tool_calls:
                try:
                    arguments = tool_call["arguments"]
                    tool_calls.append(ToolCall(
                        type="function",
                        function=FunctionCall(
                            name=tool_call["name"],
                            # Arguments may arrive as a dict or as a
                            # pre-serialized JSON string; normalize to str.
                            arguments=json.dumps(arguments,
                                                 ensure_ascii=False)
                            if isinstance(arguments, dict) else arguments,
                        ),
                    ))
                except (KeyError, TypeError):
                    # FIX: was a bare `except:` (would also swallow
                    # KeyboardInterrupt/SystemExit). Skip malformed entries
                    # rather than failing the whole request.
                    continue

            # Everything before the first tool-call block is plain content.
            content = model_output[:model_output.rfind(
                self.tool_call_start_token)]

            return ExtractedToolCallInformation(
                tools_called=True,
                tool_calls=tool_calls,
                content=content if content else None,
            )

        except Exception:
            # Lazy %-formatting: the message is only rendered if the log
            # record is actually emitted.
            logger.exception(
                "Error in extracting tool call from response. Response: %s",
                model_output)
            return ExtractedToolCallInformation(
                tools_called=False,
                tool_calls=[],
                content=model_output,
            )

    def extract_tool_calls_streaming(
        self,
        previous_text: str,
        current_text: str,
        delta_text: str,
        previous_token_ids: Sequence[int],
        current_token_ids: Sequence[int],
        delta_token_ids: Sequence[int],
        request: ChatCompletionRequest,
    ) -> Union[DeltaMessage, None]:
        """Streaming extraction is not supported by this parser."""
        raise NotImplementedError(
            "Tool calling is not supported in streaming mode!")
b/model.safetensors.index.json new file mode 100644 index 0000000..dba5c1b --- /dev/null +++ b/model.safetensors.index.json @@ -0,0 +1,298 @@ +{ + "metadata": { + "total_size": 9025492992 + }, + "weight_map": { + "lm_head.weight": "model-00002-of-00002.safetensors", + "model.embed_tokens.weight": "model-00001-of-00002.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.gate_proj.weight": 
"model-00001-of-00002.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + 
"model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.o_proj.weight": 
"model-00001-of-00002.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + 
"model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.20.mlp.down_proj.weight": 
"model-00002-of-00002.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + 
"model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.25.self_attn.k_proj.weight": 
"model-00002-of-00002.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + 
"model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.30.input_layernorm.weight": 
"model-00002-of-00002.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + 
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + 
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "model.norm.weight": "model-00002-of-00002.safetensors" + } +} diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000..3c1d049 --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,23 @@ +{ + 
"bos_token": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/tokenizer.json b/tokenizer.json new file mode 100644 index 0000000..1c1d8d5 --- /dev/null +++ b/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b +size 17209920 diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000..9cdeee1 --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,2067 @@ +{ + "add_bos_token": true, + "added_tokens_decoder": { + "128000": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128001": { + "content": "<|end_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128002": { + "content": "<|reserved_special_token_0|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128003": { + "content": "<|reserved_special_token_1|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128004": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128005": { + "content": "<|reserved_special_token_2|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128006": { + "content": "<|start_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": 
false, + "single_word": false, + "special": true + }, + "128007": { + "content": "<|end_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128008": { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128009": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128010": { + "content": "<|python_tag|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128011": { + "content": "<|reserved_special_token_3|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128012": { + "content": "<|reserved_special_token_4|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128013": { + "content": "<|reserved_special_token_5|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128014": { + "content": "<|reserved_special_token_6|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128015": { + "content": "<|reserved_special_token_7|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128016": { + "content": "<|reserved_special_token_8|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128017": { + "content": "<|reserved_special_token_9|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128018": { + "content": "<|reserved_special_token_10|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + 
"128019": { + "content": "<|reserved_special_token_11|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128020": { + "content": "<|reserved_special_token_12|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128021": { + "content": "<|reserved_special_token_13|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128022": { + "content": "<|reserved_special_token_14|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128023": { + "content": "<|reserved_special_token_15|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128024": { + "content": "<|reserved_special_token_16|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128025": { + "content": "<|reserved_special_token_17|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128026": { + "content": "<|reserved_special_token_18|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128027": { + "content": "<|reserved_special_token_19|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128028": { + "content": "<|reserved_special_token_20|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128029": { + "content": "<|reserved_special_token_21|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128030": { + "content": "<|reserved_special_token_22|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": true + }, + "128031": { + "content": "<|reserved_special_token_23|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128032": { + "content": "<|reserved_special_token_24|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128033": { + "content": "<|reserved_special_token_25|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128034": { + "content": "<|reserved_special_token_26|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128035": { + "content": "<|reserved_special_token_27|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128036": { + "content": "<|reserved_special_token_28|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128037": { + "content": "<|reserved_special_token_29|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128038": { + "content": "<|reserved_special_token_30|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128039": { + "content": "<|reserved_special_token_31|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128040": { + "content": "<|reserved_special_token_32|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128041": { + "content": "<|reserved_special_token_33|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128042": { + "content": "<|reserved_special_token_34|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128043": { + "content": "<|reserved_special_token_35|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128044": { + "content": "<|reserved_special_token_36|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128045": { + "content": "<|reserved_special_token_37|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128046": { + "content": "<|reserved_special_token_38|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128047": { + "content": "<|reserved_special_token_39|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128048": { + "content": "<|reserved_special_token_40|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128049": { + "content": "<|reserved_special_token_41|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128050": { + "content": "<|reserved_special_token_42|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128051": { + "content": "<|reserved_special_token_43|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128052": { + "content": "<|reserved_special_token_44|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128053": { + "content": "<|reserved_special_token_45|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128054": { + "content": "<|reserved_special_token_46|>", + "lstrip": false, + "normalized": false, + 
"rstrip": false, + "single_word": false, + "special": true + }, + "128055": { + "content": "<|reserved_special_token_47|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128056": { + "content": "<|reserved_special_token_48|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128057": { + "content": "<|reserved_special_token_49|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128058": { + "content": "<|reserved_special_token_50|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128059": { + "content": "<|reserved_special_token_51|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128060": { + "content": "<|reserved_special_token_52|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128061": { + "content": "<|reserved_special_token_53|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128062": { + "content": "<|reserved_special_token_54|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128063": { + "content": "<|reserved_special_token_55|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128064": { + "content": "<|reserved_special_token_56|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128065": { + "content": "<|reserved_special_token_57|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128066": { + "content": "<|reserved_special_token_58|>", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128067": { + "content": "<|reserved_special_token_59|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128068": { + "content": "<|reserved_special_token_60|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128069": { + "content": "<|reserved_special_token_61|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128070": { + "content": "<|reserved_special_token_62|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128071": { + "content": "<|reserved_special_token_63|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128072": { + "content": "<|reserved_special_token_64|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128073": { + "content": "<|reserved_special_token_65|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128074": { + "content": "<|reserved_special_token_66|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128075": { + "content": "<|reserved_special_token_67|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128076": { + "content": "<|reserved_special_token_68|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128077": { + "content": "<|reserved_special_token_69|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128078": { + "content": "<|reserved_special_token_70|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128079": { + "content": "<|reserved_special_token_71|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128080": { + "content": "<|reserved_special_token_72|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128081": { + "content": "<|reserved_special_token_73|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128082": { + "content": "<|reserved_special_token_74|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128083": { + "content": "<|reserved_special_token_75|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128084": { + "content": "<|reserved_special_token_76|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128085": { + "content": "<|reserved_special_token_77|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128086": { + "content": "<|reserved_special_token_78|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128087": { + "content": "<|reserved_special_token_79|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128088": { + "content": "<|reserved_special_token_80|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128089": { + "content": "<|reserved_special_token_81|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128090": { + "content": 
"<|reserved_special_token_82|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128091": { + "content": "<|reserved_special_token_83|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128092": { + "content": "<|reserved_special_token_84|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128093": { + "content": "<|reserved_special_token_85|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128094": { + "content": "<|reserved_special_token_86|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128095": { + "content": "<|reserved_special_token_87|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128096": { + "content": "<|reserved_special_token_88|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128097": { + "content": "<|reserved_special_token_89|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128098": { + "content": "<|reserved_special_token_90|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128099": { + "content": "<|reserved_special_token_91|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128100": { + "content": "<|reserved_special_token_92|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128101": { + "content": "<|reserved_special_token_93|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + 
"128102": { + "content": "<|reserved_special_token_94|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128103": { + "content": "<|reserved_special_token_95|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128104": { + "content": "<|reserved_special_token_96|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128105": { + "content": "<|reserved_special_token_97|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128106": { + "content": "<|reserved_special_token_98|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128107": { + "content": "<|reserved_special_token_99|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128108": { + "content": "<|reserved_special_token_100|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128109": { + "content": "<|reserved_special_token_101|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128110": { + "content": "<|reserved_special_token_102|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128111": { + "content": "<|reserved_special_token_103|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128112": { + "content": "<|reserved_special_token_104|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128113": { + "content": "<|reserved_special_token_105|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": true + }, + "128114": { + "content": "<|reserved_special_token_106|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128115": { + "content": "<|reserved_special_token_107|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128116": { + "content": "<|reserved_special_token_108|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128117": { + "content": "<|reserved_special_token_109|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128118": { + "content": "<|reserved_special_token_110|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128119": { + "content": "<|reserved_special_token_111|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128120": { + "content": "<|reserved_special_token_112|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128121": { + "content": "<|reserved_special_token_113|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128122": { + "content": "<|reserved_special_token_114|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128123": { + "content": "<|reserved_special_token_115|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128124": { + "content": "<|reserved_special_token_116|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128125": { + "content": "<|reserved_special_token_117|>", + "lstrip": false, + "normalized": false, + "rstrip": 
false, + "single_word": false, + "special": true + }, + "128126": { + "content": "<|reserved_special_token_118|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128127": { + "content": "<|reserved_special_token_119|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128128": { + "content": "<|reserved_special_token_120|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128129": { + "content": "<|reserved_special_token_121|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128130": { + "content": "<|reserved_special_token_122|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128131": { + "content": "<|reserved_special_token_123|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128132": { + "content": "<|reserved_special_token_124|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128133": { + "content": "<|reserved_special_token_125|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128134": { + "content": "<|reserved_special_token_126|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128135": { + "content": "<|reserved_special_token_127|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128136": { + "content": "<|reserved_special_token_128|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128137": { + "content": "<|reserved_special_token_129|>", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128138": { + "content": "<|reserved_special_token_130|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128139": { + "content": "<|reserved_special_token_131|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128140": { + "content": "<|reserved_special_token_132|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128141": { + "content": "<|reserved_special_token_133|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128142": { + "content": "<|reserved_special_token_134|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128143": { + "content": "<|reserved_special_token_135|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128144": { + "content": "<|reserved_special_token_136|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128145": { + "content": "<|reserved_special_token_137|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128146": { + "content": "<|reserved_special_token_138|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128147": { + "content": "<|reserved_special_token_139|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128148": { + "content": "<|reserved_special_token_140|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128149": { + "content": 
"<|reserved_special_token_141|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128150": { + "content": "<|reserved_special_token_142|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128151": { + "content": "<|reserved_special_token_143|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128152": { + "content": "<|reserved_special_token_144|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128153": { + "content": "<|reserved_special_token_145|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128154": { + "content": "<|reserved_special_token_146|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128155": { + "content": "<|reserved_special_token_147|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128156": { + "content": "<|reserved_special_token_148|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128157": { + "content": "<|reserved_special_token_149|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128158": { + "content": "<|reserved_special_token_150|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128159": { + "content": "<|reserved_special_token_151|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128160": { + "content": "<|reserved_special_token_152|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128161": { + "content": "<|reserved_special_token_153|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128162": { + "content": "<|reserved_special_token_154|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128163": { + "content": "<|reserved_special_token_155|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128164": { + "content": "<|reserved_special_token_156|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128165": { + "content": "<|reserved_special_token_157|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128166": { + "content": "<|reserved_special_token_158|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128167": { + "content": "<|reserved_special_token_159|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128168": { + "content": "<|reserved_special_token_160|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128169": { + "content": "<|reserved_special_token_161|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128170": { + "content": "<|reserved_special_token_162|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128171": { + "content": "<|reserved_special_token_163|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128172": { + "content": "<|reserved_special_token_164|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128173": { + "content": "<|reserved_special_token_165|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128174": { + "content": "<|reserved_special_token_166|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128175": { + "content": "<|reserved_special_token_167|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128176": { + "content": "<|reserved_special_token_168|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128177": { + "content": "<|reserved_special_token_169|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128178": { + "content": "<|reserved_special_token_170|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128179": { + "content": "<|reserved_special_token_171|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128180": { + "content": "<|reserved_special_token_172|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128181": { + "content": "<|reserved_special_token_173|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128182": { + "content": "<|reserved_special_token_174|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128183": { + "content": "<|reserved_special_token_175|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128184": { + "content": "<|reserved_special_token_176|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128185": { + "content": "<|reserved_special_token_177|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128186": { + "content": "<|reserved_special_token_178|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128187": { + "content": "<|reserved_special_token_179|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128188": { + "content": "<|reserved_special_token_180|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128189": { + "content": "<|reserved_special_token_181|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128190": { + "content": "<|reserved_special_token_182|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128191": { + "content": "<|reserved_special_token_183|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128192": { + "content": "<|reserved_special_token_184|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128193": { + "content": "<|reserved_special_token_185|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128194": { + "content": "<|reserved_special_token_186|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128195": { + "content": "<|reserved_special_token_187|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128196": { + "content": "<|reserved_special_token_188|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128197": { + "content": "<|reserved_special_token_189|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128198": { + "content": "<|reserved_special_token_190|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128199": { + "content": "<|reserved_special_token_191|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128200": { + "content": "<|reserved_special_token_192|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128201": { + "content": "<|reserved_special_token_193|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128202": { + "content": "<|reserved_special_token_194|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128203": { + "content": "<|reserved_special_token_195|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128204": { + "content": "<|reserved_special_token_196|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128205": { + "content": "<|reserved_special_token_197|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128206": { + "content": "<|reserved_special_token_198|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128207": { + "content": "<|reserved_special_token_199|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128208": { + "content": 
"<|reserved_special_token_200|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128209": { + "content": "<|reserved_special_token_201|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128210": { + "content": "<|reserved_special_token_202|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128211": { + "content": "<|reserved_special_token_203|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128212": { + "content": "<|reserved_special_token_204|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128213": { + "content": "<|reserved_special_token_205|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128214": { + "content": "<|reserved_special_token_206|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128215": { + "content": "<|reserved_special_token_207|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128216": { + "content": "<|reserved_special_token_208|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128217": { + "content": "<|reserved_special_token_209|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128218": { + "content": "<|reserved_special_token_210|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128219": { + "content": "<|reserved_special_token_211|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128220": { + "content": "<|reserved_special_token_212|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128221": { + "content": "<|reserved_special_token_213|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128222": { + "content": "<|reserved_special_token_214|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128223": { + "content": "<|reserved_special_token_215|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128224": { + "content": "<|reserved_special_token_216|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128225": { + "content": "<|reserved_special_token_217|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128226": { + "content": "<|reserved_special_token_218|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128227": { + "content": "<|reserved_special_token_219|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128228": { + "content": "<|reserved_special_token_220|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128229": { + "content": "<|reserved_special_token_221|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128230": { + "content": "<|reserved_special_token_222|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128231": { + "content": "<|reserved_special_token_223|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": true + }, + "128232": { + "content": "<|reserved_special_token_224|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128233": { + "content": "<|reserved_special_token_225|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128234": { + "content": "<|reserved_special_token_226|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128235": { + "content": "<|reserved_special_token_227|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128236": { + "content": "<|reserved_special_token_228|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128237": { + "content": "<|reserved_special_token_229|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128238": { + "content": "<|reserved_special_token_230|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128239": { + "content": "<|reserved_special_token_231|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128240": { + "content": "<|reserved_special_token_232|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128241": { + "content": "<|reserved_special_token_233|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128242": { + "content": "<|reserved_special_token_234|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128243": { + "content": "<|reserved_special_token_235|>", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128244": { + "content": "<|reserved_special_token_236|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128245": { + "content": "<|reserved_special_token_237|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128246": { + "content": "<|reserved_special_token_238|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128247": { + "content": "<|reserved_special_token_239|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128248": { + "content": "<|reserved_special_token_240|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128249": { + "content": "<|reserved_special_token_241|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128250": { + "content": "<|reserved_special_token_242|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128251": { + "content": "<|reserved_special_token_243|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128252": { + "content": "<|reserved_special_token_244|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128253": { + "content": "<|reserved_special_token_245|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128254": { + "content": "<|reserved_special_token_246|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128255": { + "content": "<|reserved_special_token_247|>", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "bos_token": "<|begin_of_text|>", + "clean_up_tokenization_spaces": true, + "eos_token": "<|eot_id|>", + "extra_special_tokens": {}, + "model_input_names": [ + "input_ids", + "attention_mask" + ], + "model_max_length": 131072, + "pad_token": "<|finetune_right_pad_id|>", + "padding_side": "left", + "tokenizer_class": "PreTrainedTokenizer", + "unk_token": null, + "chat_template": "{%- if tools %}{{- '<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n' -}}{%- if messages[0].role == 'system' -%}{{- messages[0].content + '\n\n' -}}{%- endif -%}{{- '[' -}}{%- for tool in tools -%}{{- (tool.function if tool.function is defined else tool) | tojson -}}{{- ', ' if not loop.last else '' -}}{%- endfor -%}{{- ']' -}}{{- '<|eot_id|>' -}}{%- else %}{%- if messages[0].role == 'system' %}{{- '<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n' + messages[0].content + '<|eot_id|>' -}}{%- endif %}{%- endif %}{%- for message in messages -%}{%- if (message.role == 'user') -%}{{- '<|start_header_id|>user<|end_header_id|>\n\n' + message.content + '<|eot_id|>' -}}{%- elif message.role == 'assistant' -%}{%- set content = message.content -%}{{- '<|start_header_id|>assistant<|end_header_id|>\n\n' + content -}}{%- if message.tool_calls -%}{{- '[' -}}{%- for tool_call in message.tool_calls -%}{%- if tool_call.function -%}{%- set tool_call = tool_call.function -%}{%- endif -%}{{- '{\"name\": \"' }}{{- tool_call.name }}{{- '\", \"arguments\": ' -}}{%- if tool_call.arguments is string -%}{{- tool_call.arguments -}}{%- else -%}{{- tool_call.arguments | tojson -}}{%- endif -%}{{- ', ' if not loop.last else '' -}}{%- endfor -%}{{- ']' -}}{%- endif %}{{- '<|eot_id|>' -}}{%- elif message.role == 'tool' -%}{%- if loop.first or (messages[loop.index0 - 1].role != 'tool') -%}{{- '<|start_header_id|>user<|end_header_id|>\n\n' }}{{- '[' -}}{%- endif -%}{{- message.content 
-}}{{- ', ' if not loop.last and (messages[loop.index0 + 1].role == 'tool') else '' -}}{%- if loop.last or (messages[loop.index0 + 1].role != 'tool') -%}{{- ']' -}}{{- '<|eot_id|>' -}}{%- endif %}{%- endif %}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}{%- endif %}" +} \ No newline at end of file