From 5d772a085cbc7621e52de00727a056a893bd1b47 Mon Sep 17 00:00:00 2001
From: ZHU QIHAO <18811325956@163.com>
Date: Fri, 27 Oct 2023 14:06:57 +0800
Subject: [PATCH] Update README.md

---
 README.md | 26 ++++++++++++++++++++++----
 1 file changed, 22 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 34cea0a..43d8b75 100644
--- a/README.md
+++ b/README.md
@@ -48,13 +48,31 @@ Here give some examples of how to use our model.
 #### Code Completion
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
-tokenizer = AutoTokenizer.from_pretrained("deepseek/deepseek-coder-7b")
-device = 0 if torch.cuda.is_available() else -1
-model = AutoModelForCausalLM.from_pretrained("deepseek/deepseek-coder-7b").to(device)
-inputs = tokenizer("def hello_world():", return_tensors="pt").to(device)
+import torch
+tokenizer = AutoTokenizer.from_pretrained("deepseek/deepseek-coder-7b-base", trust_remote_code=True)
+device = "cuda" if torch.cuda.is_available() else "cpu"
+model = AutoModelForCausalLM.from_pretrained("deepseek/deepseek-coder-7b-base", trust_remote_code=True).to(device)
+inputs = tokenizer("#write a quick sort algorithm", return_tensors="pt").to(device)
 outputs = model.generate(**inputs, max_length=128)
 print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 ```
+This code will output:
+```python
+#write a quick sort algorithm
+
+def quick_sort(arr):
+    if len(arr) <= 1:
+        return arr
+    pivot = arr[0]
+    left = []
+    right = []
+    for i in range(1, len(arr)):
+        if arr[i] < pivot:
+            left.append(arr[i])
+        else:
+            right.append(arr[i])
+    return quick_sort(left) + [pivot] + quick_sort(right)
+```
 
 #### Code Insertion
 ```python