Upload app.py
Browse files
app.py
CHANGED
@@ -3,15 +3,15 @@ from llama_cpp import Llama
|
|
3 |
|
4 |
llm = Llama(model_path="vicuna-AlekseyKorshuk-7B-GPTQ-4bit-128g.GGML.bin", n_ctx=2048)
|
5 |
|
6 |
-
def generate_text(
|
7 |
-
output = llm(prompt, max_tokens=
|
8 |
text = output['choices'][0]['text']
|
9 |
return text
|
10 |
|
11 |
input_text = gr.inputs.Textbox(lines= 10, label="Enter your input text")
|
12 |
output_text = gr.outputs.Textbox(label="Output text")
|
13 |
|
14 |
-
description = "
|
15 |
|
16 |
examples = [
|
17 |
["What is the capital of France? ", "The capital of France is Paris."],
|
|
|
3 |
|
4 |
llm = Llama(model_path="vicuna-AlekseyKorshuk-7B-GPTQ-4bit-128g.GGML.bin", n_ctx=2048)
|
5 |
|
6 |
+
def generate_text(prompt):
    """Generate a completion for *prompt* with the module-level Llama model.

    Sampling is deliberately conservative (temperature=0.1, top_p=0.5) and
    generation stops at the first '#' or after 468 tokens; the prompt itself
    is not echoed back.
    """
    completion = llm(
        prompt,
        max_tokens=468,
        temperature=0.1,
        top_p=0.5,
        echo=False,
        stop=["#"],
    )
    # llama_cpp returns an OpenAI-style dict; the generated string lives
    # in the first choice's 'text' field.
    return completion['choices'][0]['text']
|
10 |
|
11 |
input_text = gr.inputs.Textbox(lines= 10, label="Enter your input text")
|
12 |
output_text = gr.outputs.Textbox(label="Output text")
|
13 |
|
14 |
+
description = "Vicuna-7B-GPTQ-4bit-128g.GGML, max_tokens=468, temperature=0.1, top_p=0.5"
|
15 |
|
16 |
examples = [
|
17 |
["What is the capital of France? ", "The capital of France is Paris."],
|