Add files using upload-large-folder tool
- README.md +16 -26
- chat_template.jinja +2 -2
- config.json +2 -75
- docs/mlx_deploy_guide.md +70 -0
- docs/sglang_deploy_guide_cn.md +3 -0
- docs/tool_calling_guide_cn.md +13 -1
- docs/vllm_deploy_guide_cn.md +3 -0
- figures/wechat.jpeg +0 -0
- merges.txt +1 -1
- tokenizer.json +2 -2
- tokenizer_config.json +243 -248
- vocab.json +0 -0
README.md
CHANGED
```diff
@@ -1,33 +1,8 @@
 ---
-tags:
-- unsloth
-base_model:
-- MiniMaxAI/MiniMax-M2
 pipeline_tag: text-generation
 license: mit
 library_name: transformers
 ---
-> [!NOTE]
-> Includes Unsloth **chat template fixes**! <br> For `llama.cpp`, use `--jinja`
->
-
-<div>
-<p style="margin-top: 0;margin-bottom: 0;">
-<em><a href="https://docs.unsloth.ai/basics/unsloth-dynamic-v2.0-gguf">Unsloth Dynamic 2.0</a> achieves superior accuracy & outperforms other leading quants.</em>
-</p>
-<div style="display: flex; gap: 5px; align-items: center; ">
-<a href="https://github.com/unslothai/unsloth/">
-<img src="https://github.com/unslothai/unsloth/raw/main/images/unsloth%20new%20logo.png" width="133">
-</a>
-<a href="https://discord.gg/unsloth">
-<img src="https://github.com/unslothai/unsloth/raw/main/images/Discord%20button.png" width="173">
-</a>
-<a href="https://docs.unsloth.ai/">
-<img src="https://raw.githubusercontent.com/unslothai/unsloth/refs/heads/main/images/documentation%20green%20button.png" width="143">
-</a>
-</div>
-</div>
-
 
 <div align="center">
 
@@ -200,6 +175,11 @@ We recommend using [SGLang](https://docs.sglang.ai/) to serve MiniMax-M2. SGLang
 
 We recommend using [vLLM](https://docs.vllm.ai/en/stable/) to serve MiniMax-M2. vLLM provides efficient day-0 support of MiniMax-M2 model, check https://docs.vllm.ai/projects/recipes/en/latest/MiniMax/MiniMax-M2.html for latest deployment guide. We also provide our [vLLM Deployment Guide](https://huggingface.co/MiniMaxAI/MiniMax-M2/blob/main/docs/vllm_deploy_guide.md).
 
+### MLX
+
+We recommend using [MLX-LM](https://github.com/ml-explore/mlx-lm) to serve MiniMax-M2. Please refer to our [MLX Deployment Guide](https://huggingface.co/MiniMaxAI/MiniMax-M2/blob/main/docs/mlx_deploy_guide.md) for more details.
+
+
 ### Inference Parameters
 We recommend using the following parameters for best performance: `temperature=1.0`, `top_p = 0.95`, `top_k = 40`.
 
@@ -209,6 +189,16 @@ We recommend using the following parameters for best performance: `temperature=1
 
 Please refer to our [Tool Calling Guide](https://huggingface.co/MiniMaxAI/MiniMax-M2/blob/main/docs/tool_calling_guide.md).
 
+
+
+# Community Showcases
+
+> The projects below are built and maintained by the community/partners. They are not official MiniMax products, and results may vary.
+
+- **AnyCoder** — a web IDE–style coding assistant Space on Hugging Face, **uses MiniMax-M2 as the default model**: https://huggingface.co/spaces/akhaliq/anycoder
+  *Maintainer:* @akhaliq (Hugging Face)
+
+
 # Contact Us
 
-Contact us at [model@minimax.io](mailto:model@minimax.io).
+Contact us at [model@minimax.io](mailto:model@minimax.io) | [WeChat](https://github.com/MiniMax-AI/MiniMax-AI.github.io/blob/main/images/wechat-qrcode.jpeg).
```
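The recommended sampling parameters in the hunk above (`temperature=1.0`, `top_p=0.95`, `top_k=40`) can be supplied through any OpenAI-compatible endpoint exposed by vLLM or SGLang. A minimal sketch, assuming a server is already running at `http://localhost:8000/v1` and serves the model under the id `MiniMaxAI/MiniMax-M2`; the URL, API key, and the `extra_body` pass-through for `top_k` are assumptions, not something this diff specifies:

```python
from openai import OpenAI

# Point the client at a locally served OpenAI-compatible endpoint (assumed URL/key).
client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

response = client.chat.completions.create(
    model="MiniMaxAI/MiniMax-M2",
    messages=[{"role": "user", "content": "How tall is Mount Everest?"}],
    temperature=1.0,
    top_p=0.95,
    # top_k is not part of the OpenAI request schema; vLLM and SGLang accept it via extra_body.
    extra_body={"top_k": 40},
)
print(response.choices[0].message.content)
```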
chat_template.jinja
CHANGED
```diff
@@ -4,7 +4,7 @@
 {#- Tool Rendering Functions ============================================== -#}
 {%- macro render_tool_namespace(namespace_name, tool_list) -%}
 {%- for tool in tool_list -%}
-<tool>{{ tool.function | tojson }}</tool>
+<tool>{{ tool.function | tojson(ensure_ascii=False) }}</tool>
 {% endfor -%}
 {%- endmacro -%}
 {%- macro visible_text(content) -%}
@@ -111,7 +111,7 @@
 {% set _args = tool_call.arguments %}
 {%- for k, v in _args.items() %}
 {{- '<parameter name="' + k + '">' }}
-{{- v | tojson if v is not string else v }}
+{{- v | tojson(ensure_ascii=False) if v is not string else v }}
 {{- '</parameter>' }}
 {% endfor %}
 {{- '</invoke>' ~ '\n' }}
```
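The `ensure_ascii=False` change above matters when tool schemas or argument values contain non-ASCII text (for example Chinese): with ASCII escaping they would be rendered as `\uXXXX` sequences inside the `<tool>` and `<parameter>` tags. A minimal illustration using the standard-library `json` module (Jinja's `tojson` filter behaves analogously; this snippet is illustrative and not part of the template):

```python
import json

args = {"city": "北京"}  # a non-ASCII tool-call argument

# Default serialization escapes every non-ASCII character.
print(json.dumps(args))                      # {"city": "\u5317\u4eac"}

# ensure_ascii=False keeps the original characters, which is what the
# updated template now does for tool schemas and parameter values.
print(json.dumps(args, ensure_ascii=False))  # {"city": "北京"}
```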
config.json
CHANGED
```diff
@@ -67,88 +67,17 @@
     1,
     1
   ],
-  "block_size": 256,
   "bos_token_id": null,
   "eos_token_id": null,
-  "full_attn_alpha_factor": 1,
-  "full_attn_beta_factor": 1,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 3072,
   "initializer_range": 0.02,
   "intermediate_size": 1536,
-  "layer_types": [
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention",
-    "full_attention",
-    "linear_attention"
-  ],
   "layernorm_full_attention_beta": 1.0,
   "layernorm_linear_attention_beta": 1.0,
   "layernorm_mlp_beta": 1.0,
-  "linear_attn_alpha_factor": 1,
-  "linear_attn_beta_factor": 1,
   "max_position_embeddings": 196608,
-  "mlp_alpha_factor": 1,
-  "mlp_beta_factor": 1,
   "mlp_intermediate_size": 8192,
   "model_type": "minimax",
   "mtp_transformer_layers": 1,
@@ -159,7 +88,6 @@
   "num_local_experts": 256,
   "num_mtp_modules": 3,
   "output_router_logits": false,
-  "pad_token_id": 200004,
   "qk_norm_type": "per_layer",
   "quantization_config": {
     "activation_scheme": "dynamic",
@@ -180,11 +108,10 @@
   "shared_moe_mode": "sigmoid",
   "sliding_window": null,
   "tie_word_embeddings": false,
-  "transformers_version": "4.
-  "unsloth_fixed": true,
+  "transformers_version": "4.46.1",
   "use_cache": true,
   "use_mtp": true,
   "use_qk_norm": true,
   "use_routing_bias": true,
   "vocab_size": 200064
-}
+}
```
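To confirm what the cleaned `config.json` exposes once loaded, here is a minimal sketch; the repo id follows the README, and `trust_remote_code=True` is an assumption in case the `minimax` model type is not registered in the installed `transformers` version:

```python
from transformers import AutoConfig

# Load the published config (repo id and trust_remote_code are assumptions).
config = AutoConfig.from_pretrained("MiniMaxAI/MiniMax-M2", trust_remote_code=True)

# Fields visible in the diff above.
print(config.model_type)               # "minimax"
print(config.vocab_size)               # 200064
print(config.max_position_embeddings)  # 196608

# Keys removed by this commit should no longer be present on the config object.
for key in ("block_size", "layer_types", "unsloth_fixed"):
    print(key, hasattr(config, key))
```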
docs/mlx_deploy_guide.md
ADDED
## MLX deployment guide

Run, serve, and fine-tune [**MiniMax-M2**](https://huggingface.co/MiniMaxAI/MiniMax-M2) locally on your Mac using the **MLX** framework. This guide gets you up and running quickly.

> **Requirements**
> - Apple Silicon Mac (M3 Ultra or later)
> - **At least 256GB of unified memory (RAM)**


**Installation**

Install the `mlx-lm` package via pip:

```bash
pip install -U mlx-lm
```

**CLI**

Generate text directly from the terminal:

```bash
mlx_lm.generate \
  --model mlx-community/MiniMax-M2-4bit \
  --prompt "How tall is Mount Everest?"
```

> Add `--max-tokens 256` to control response length, or `--temp 0.7` for creativity.

**Python Script Example**

Use `mlx-lm` in your own Python scripts:

```python
from mlx_lm import load, generate

# Load the quantized model
model, tokenizer = load("mlx-community/MiniMax-M2-4bit")

prompt = "Hello, how are you?"

# Apply chat template if available (recommended for chat models)
if tokenizer.chat_template is not None:
    messages = [{"role": "user", "content": prompt}]
    prompt = tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True
    )

# Generate response
response = generate(
    model,
    tokenizer,
    prompt=prompt,
    max_tokens=256,
    temp=0.7,
    verbose=True
)

print(response)
```

**Tips**
- **Model variants**: Check this [MLX community collection on Hugging Face](https://huggingface.co/collections/mlx-community/minimax-m2) for `MiniMax-M2-4bit`, `6bit`, `8bit`, or `bfloat16` versions.
- **Fine-tuning**: Use `mlx_lm.lora` for parameter-efficient fine-tuning (PEFT).

**Resources**
- GitHub: [https://github.com/ml-explore/mlx-lm](https://github.com/ml-explore/mlx-lm)
- Models: [https://huggingface.co/mlx-community](https://huggingface.co/mlx-community)
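Building on the script above, a small multi-turn sketch: keep a running `messages` list and re-apply the chat template each turn. It only reuses the `load`, `apply_chat_template`, and `generate` calls already shown in this guide, with the same community 4-bit conversion:

```python
from mlx_lm import load, generate

model, tokenizer = load("mlx-community/MiniMax-M2-4bit")

messages = []  # running conversation history


def chat(user_text, max_tokens=256):
    """Append a user turn, render the history with the chat template,
    generate a reply, and store it back into the history."""
    messages.append({"role": "user", "content": user_text})
    prompt = tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )
    reply = generate(model, tokenizer, prompt=prompt, max_tokens=max_tokens)
    messages.append({"role": "assistant", "content": reply})
    return reply


print(chat("How tall is Mount Everest?"))
print(chat("And how does that compare to K2?"))
```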
docs/sglang_deploy_guide_cn.md
CHANGED
```diff
@@ -112,4 +112,7 @@ export HF_ENDPOINT=https://hf-mirror.com
 - Contact our technical support team via official channels such as email: [model@minimax.io](mailto:model@minimax.io)
 
 - Open an issue in our [GitHub](https://github.com/MiniMax-AI) repository
+
+- Send feedback through our [official WeChat group](https://github.com/MiniMax-AI/MiniMax-AI.github.io/blob/main/images/wechat-qrcode.jpeg)
+
 We will keep improving the model deployment experience. Feedback is welcome!
```
docs/tool_calling_guide_cn.md
CHANGED
```diff
@@ -482,4 +482,16 @@ def execute_function_call(function_name: str, arguments: dict):
 - [MiniMax-M2 model repository](https://github.com/MiniMax-AI/MiniMax-M2)
 - [vLLM project homepage](https://github.com/vllm-project/vllm)
 - [SGLang project homepage](https://github.com/sgl-project/sglang)
-- [OpenAI Python SDK](https://github.com/openai/openai-python)
+- [OpenAI Python SDK](https://github.com/openai/openai-python)
+
+## Getting Support
+
+If you run into any issues:
+
+- Contact our technical support team via official channels such as email: [model@minimax.io](mailto:model@minimax.io)
+
+- Open an issue in our repository
+
+- Send feedback through our [official WeChat group](https://github.com/MiniMax-AI/MiniMax-AI.github.io/blob/main/images/wechat-qrcode.jpeg)
+
+We will keep improving the model usage experience. Feedback is welcome!
```
docs/vllm_deploy_guide_cn.md
CHANGED
```diff
@@ -110,4 +110,7 @@ SAFETENSORS_FAST_GPU=1 vllm serve \
 - Contact our technical support team via official channels such as email: [model@minimax.io](mailto:model@minimax.io)
 
 - Open an issue in our [GitHub](https://github.com/MiniMax-AI) repository
+
+- Send feedback through our [official WeChat group](https://github.com/MiniMax-AI/MiniMax-AI.github.io/blob/main/images/wechat-qrcode.jpeg)
+
 We will keep improving the model deployment experience. Feedback is welcome!
```
figures/wechat.jpeg
ADDED
merges.txt
CHANGED
```diff
@@ -199742,4 +199742,4 @@ ch ile
 ع اÙĨ
 Ġdel ving
 Ġت ÙĥÙĪÙĬÙĨ
-.ch romium
+.ch romium
```
tokenizer.json
CHANGED
```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:757622126525aeeb131756849d93298070ff3f0319c455ec8c5bb0f6b1cebbe8
+size 9730160
```
tokenizer_config.json
CHANGED
```diff
@@ -1,500 +1,495 @@
 {
-  "add_bos_token": true,
-  "add_prefix_space": false,
   "added_tokens_decoder": {
```

In the new revision, `added_tokens_decoder` contains the following entries, keyed by token id. Every entry sets `lstrip`, `normalized`, `rstrip`, and `single_word` to `false` and `special` to `true`, except the four markers at the end of the table, which set `special` to `false`.

| Token id | Content | Notes |
|---|---|---|
| 200000 | `]!p~[` | |
| 200001 | `<fim_prefix>` | |
| 200002 | `<fim_middle>` | |
| 200003 | `<fim_suffix>` | |
| 200004 | `<fim_pad>` | |
| 200005 | `<reponame>` | |
| 200006 | `<filename>` | |
| 200007 | `<gh_stars>` | |
| 200008 | `<issue_start>` | |
| 200009 | `<issue_comment>` | |
| 200010 | `<issue_closed>` | |
| 200011 | `<jupyter_start>` | new in this commit |
| 200012 | `<jupyter_text>` | |
| 200013 | `<jupyter_code>` | |
| 200014 | `<jupyter_output>` | |
| 200015 | `<empty_output>` | new in this commit |
| 200016 | `<commit_before>` | |
| 200017 | `<commit_msg>` | |
| 200018 | `<commit_after>` | |
| 200019 | `]~b]` | |
| 200020 | `[e~[` | |
| 200021 | `]!d~[` | |
| 200022 | `<function_call>` | |
| 200023 | `<code_interpreter>` | |
| 200024 | `]<]speech[>[` | |
| 200025 | `]<]image[>[` | |
| 200026 | `]<]video[>[` | |
| 200027 | `]<]start of speech[>[` | |
| 200028 | `]<]end of speech[>[` | |
| 200029 | `]<]start of image[>[` | |
| 200030 | `]<]end of image[>[` | |
| 200031 | `]<]start of video[>[` | |
| 200032 | `]<]end of video[>[` | |
| 200033 | `]<]vision pad[>[` | |
| 200034 | `]~!b[` | |
| 200035 | `<jupyter_error>` | |
| 200036 | `<add_file>` | |
| 200037 | `<delete_file>` | |
| 200038 | `<rename_file>` | |
| 200039 | `<edit_file>` | |
| 200040 | `<commit_message>` | |
| 200041 | `<empty_source_file>` | |
| 200042 | `<repo_struct>` | |
| 200043 | `<code_context>` | new in this commit |
| 200044 | `<file_content>` | new in this commit |
| 200045 | `<source_files>` | new in this commit |
| 200046 | `<pr_start>` | new in this commit |
| 200047 | `<review_comment>` | new in this commit |
| 200048 | `<filepath>` | new in this commit |
| 200049 | `<file_sep>` | new in this commit |
| 200050 | `<think>` | new in this commit, `special: false` |
| 200051 | `</think>` | new in this commit, `special: false` |
| 200052 | `<minimax:tool_call>` | new in this commit, `special: false` |
| 200053 | `</minimax:tool_call>` | new in this commit, `special: false` |

The `additional_special_tokens` array in the new revision lists:

```json
["<code_interpreter>", "<commit_after>", "<commit_before>", "<commit_msg>", "<empty_output>",
 "<filename>", "<fim_middle>", "<fim_pad>", "<fim_prefix>", "<fim_suffix>", "<function_call>",
 "<gh_stars>", "]<]speech[>[", "]<]image[>[", "]<]video[>[", "]<]start of speech[>[",
 "]<]end of speech[>[", "]<]start of image[>[", "]<]end of image[>[", "]<]start of video[>[",
 "]<]end of video[>[", "]<]vision pad[>[", "]~!b[", "<issue_closed>", "<issue_comment>",
 "<issue_start>", "<jupyter_code>", "<jupyter_output>", "<jupyter_start>", "<jupyter_text>",
 "<reponame>", "[e~[", "]!d~[", "]!p~[", "]~b]", "<jupyter_error>", "<add_file>", "<delete_file>",
 "<rename_file>", "<edit_file>", "<commit_message>", "<empty_source_file>", "<repo_struct>",
 "<code_context>", "<file_content>", "<source_files>", "<pr_start>", "<review_comment>",
 "<filepath>", "<file_sep>"]
```

The remaining top-level fields change as follows:

```diff
+  "add_prefix_space": false,
   "bos_token": "]~!b[",
   "clean_up_tokenization_spaces": false,
   "eos_token": "[e~[",
-  "model_max_length": 196608,
-  "pad_token": "<fim_pad>",
-  "padding_side": "left",
+  "model_max_length": 40960000,
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "]!d~["
 }
```
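A quick way to sanity-check the new tokenizer entries after this commit: load the tokenizer and look up the added markers. The repo id follows the README; that the tokenizer loads without extra arguments is an assumption:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("MiniMaxAI/MiniMax-M2")

# The reasoning and tool-call markers added in this commit should each map to
# a single token id (200050-200053 in the table above).
for tok in ["<think>", "</think>", "<minimax:tool_call>", "</minimax:tool_call>"]:
    print(tok, tokenizer.convert_tokens_to_ids(tok))

# The updated scalar field is visible on the loaded object as well.
print(tokenizer.model_max_length)  # expected 40960000 per the diff
```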
vocab.json
CHANGED
The diff for this file is too large to render.