初始化项目,由ModelHub XC社区提供模型
Model: FreedomIntelligence/HuatuoGPT2-7B Source: Original Platform
This commit is contained in:
34
README.md
Normal file
34
README.md
Normal file
@@ -0,0 +1,34 @@
---
language:
- zh
license: apache-2.0
tasks:
- text-generation
---

<!-- markdownlint-disable first-line-h1 -->
<!-- markdownlint-disable html -->
<div align="center">
<h1>
HuatuoGPT2-7B
</h1>
</div>

<div align="center">
<a href="https://github.com/FreedomIntelligence/HuatuoGPT-II" target="_blank">GitHub</a> | <a href="https://arxiv.org/pdf/2311.09774.pdf" target="_blank">Our Paper</a>
</div>

# <span id="Start">Quick Start</span>

```Python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation.utils import GenerationConfig
tokenizer = AutoTokenizer.from_pretrained("FreedomIntelligence/HuatuoGPT2-7B", use_fast=True, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("FreedomIntelligence/HuatuoGPT2-7B", device_map="auto", torch_dtype=torch.bfloat16, trust_remote_code=True)
model.generation_config = GenerationConfig.from_pretrained("FreedomIntelligence/HuatuoGPT2-7B")
messages = []
messages.append({"role": "user", "content": "肚子疼怎么办?"})
response = model.HuatuoChat(tokenizer, messages)
print(response)
```
Reference in New Issue
Block a user