lukestanley committed on
Commit
12c7670
·
1 Parent(s): ca6258e

Move constants

Browse files
Files changed (1) hide show
  1. app.py +10 -8
app.py CHANGED
@@ -32,14 +32,6 @@ LLM_WORKER = env.get("LLM_WORKER", "runpod")
32
  if LLM_WORKER == "http" or LLM_WORKER == "in_memory":
33
  inference_binary_check()
34
 
35
- # Now chill can import llama-cpp-python without an error:
36
- from chill import improvement_loop
37
-
38
-
39
- def chill_out(text):
40
- print("Got this input:", text)
41
- return str(improvement_loop(text))
42
-
43
  examples = [
44
  ["You guys are so slow, we will never ship it!"],
45
  ["Your idea of a balanced diet is a biscuit in each hand."]
@@ -73,6 +65,16 @@ Help make the internet a kinder place, one comment at a time.
73
  Your contribution could make a big difference!
74
  """
75
 
 
 
 
 
 
 
 
 
 
 
76
  demo = gr.Interface(
77
  fn=chill_out,
78
  inputs="text",
 
32
  if LLM_WORKER == "http" or LLM_WORKER == "in_memory":
33
  inference_binary_check()
34
 
 
 
 
 
 
 
 
 
35
  examples = [
36
  ["You guys are so slow, we will never ship it!"],
37
  ["Your idea of a balanced diet is a biscuit in each hand."]
 
65
  Your contribution could make a big difference!
66
  """
67
 
68
+
69
+ # Now chill can import llama-cpp-python without an error:
70
+ from chill import improvement_loop
71
+
72
+
73
+ def chill_out(text):
74
+ print("Got this input:", text)
75
+ return str(improvement_loop(text))
76
+
77
+
78
  demo = gr.Interface(
79
  fn=chill_out,
80
  inputs="text",