Transformers
PyTorch
Safetensors
Chinese
t5
text2text-generation
prompt
Text2Text-Generation
text-generation-inference
Instructions to use mxmax/Chinese_Chat_T5_Base with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use mxmax/Chinese_Chat_T5_Base with Transformers:
```python
# Load model directly
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("mxmax/Chinese_Chat_T5_Base")
model = AutoModelForSeq2SeqLM.from_pretrained("mxmax/Chinese_Chat_T5_Base")
```
- Notebooks
- Google Colab
- Kaggle
```json
{
  "<extra_id_0>": 32227,
  "<extra_id_10>": 32217,
  "<extra_id_11>": 32216,
  "<extra_id_12>": 32215,
  "<extra_id_13>": 32214,
  "<extra_id_14>": 32213,
  "<extra_id_15>": 32212,
  "<extra_id_16>": 32211,
  "<extra_id_17>": 32210,
  "<extra_id_18>": 32209,
  "<extra_id_19>": 32208,
  "<extra_id_1>": 32226,
  "<extra_id_20>": 32207,
  "<extra_id_21>": 32206,
  "<extra_id_22>": 32205,
  "<extra_id_23>": 32204,
  "<extra_id_24>": 32203,
  "<extra_id_25>": 32202,
  "<extra_id_26>": 32201,
  "<extra_id_27>": 32200,
  "<extra_id_28>": 32199,
  "<extra_id_29>": 32198,
  "<extra_id_2>": 32225,
  "<extra_id_30>": 32197,
  "<extra_id_31>": 32196,
  "<extra_id_32>": 32195,
  "<extra_id_33>": 32194,
  "<extra_id_34>": 32193,
  "<extra_id_35>": 32192,
  "<extra_id_36>": 32191,
  "<extra_id_37>": 32190,
  "<extra_id_38>": 32189,
  "<extra_id_39>": 32188,
  "<extra_id_3>": 32224,
  "<extra_id_40>": 32187,
  "<extra_id_41>": 32186,
  "<extra_id_42>": 32185,
  "<extra_id_43>": 32184,
  "<extra_id_44>": 32183,
  "<extra_id_45>": 32182,
  "<extra_id_46>": 32181,
  "<extra_id_47>": 32180,
  "<extra_id_48>": 32179,
  "<extra_id_49>": 32178,
  "<extra_id_4>": 32223,
  "<extra_id_50>": 32177,
  "<extra_id_51>": 32176,
  "<extra_id_52>": 32175,
  "<extra_id_53>": 32174,
  "<extra_id_54>": 32173,
  "<extra_id_55>": 32172,
  "<extra_id_56>": 32171,
  "<extra_id_57>": 32170,
  "<extra_id_58>": 32169,
  "<extra_id_59>": 32168,
  "<extra_id_5>": 32222,
  "<extra_id_60>": 32167,
  "<extra_id_61>": 32166,
  "<extra_id_62>": 32165,
  "<extra_id_63>": 32164,
  "<extra_id_64>": 32163,
  "<extra_id_65>": 32162,
  "<extra_id_66>": 32161,
  "<extra_id_67>": 32160,
  "<extra_id_68>": 32159,
  "<extra_id_69>": 32158,
  "<extra_id_6>": 32221,
  "<extra_id_70>": 32157,
  "<extra_id_71>": 32156,
  "<extra_id_72>": 32155,
  "<extra_id_73>": 32154,
  "<extra_id_74>": 32153,
  "<extra_id_75>": 32152,
  "<extra_id_76>": 32151,
  "<extra_id_77>": 32150,
  "<extra_id_78>": 32149,
  "<extra_id_79>": 32148,
  "<extra_id_7>": 32220,
  "<extra_id_80>": 32147,
  "<extra_id_81>": 32146,
  "<extra_id_82>": 32145,
  "<extra_id_83>": 32144,
  "<extra_id_84>": 32143,
  "<extra_id_85>": 32142,
  "<extra_id_86>": 32141,
  "<extra_id_87>": 32140,
  "<extra_id_88>": 32139,
  "<extra_id_89>": 32138,
  "<extra_id_8>": 32219,
  "<extra_id_90>": 32137,
  "<extra_id_91>": 32136,
  "<extra_id_92>": 32135,
  "<extra_id_93>": 32134,
  "<extra_id_94>": 32133,
  "<extra_id_95>": 32132,
  "<extra_id_96>": 32131,
  "<extra_id_97>": 32130,
  "<extra_id_98>": 32129,
  "<extra_id_99>": 32128,
  "<extra_id_9>": 32218
}
```