KaizeShi committed
Commit 32800b9 · verified · 1 Parent(s): bfe7d8b

Delete utils

Files changed (4)
  1. utils/README.md +0 -13
  2. utils/__init__.py +0 -0
  3. utils/callbacks.py +0 -75
  4. utils/prompter.py +0 -51
utils/README.md DELETED
@@ -1,13 +0,0 @@
-# Directory for helpers modules
-
-## prompter.py
-
-Prompter class, a template manager.
-
-`from utils.prompter import Prompter`
-
-## callbacks.py
-
-Helpers to support streaming generate output.
-
-`from utils.callbacks import Iteratorize, Stream`

utils/__init__.py DELETED
File without changes
utils/callbacks.py DELETED
@@ -1,75 +0,0 @@
-"""
-Helpers to support streaming generate output.
-Borrowed from https://github.com/oobabooga/text-generation-webui/blob/ad37f396fc8bcbab90e11ecf17c56c97bfbd4a9c/modules/callbacks.py
-"""
-
-import gc
-import traceback
-from queue import Queue
-from threading import Thread
-
-import torch
-import transformers
-
-
-class Stream(transformers.StoppingCriteria):
-    def __init__(self, callback_func=None):
-        self.callback_func = callback_func
-
-    def __call__(self, input_ids, scores) -> bool:
-        if self.callback_func is not None:
-            self.callback_func(input_ids[0])
-        return False
-
-
-class Iteratorize:
-
-    """
-    Transforms a function that takes a callback
-    into a lazy iterator (generator).
-    """
-
-    def __init__(self, func, kwargs={}, callback=None):
-        self.mfunc = func
-        self.c_callback = callback
-        self.q = Queue()
-        self.sentinel = object()
-        self.kwargs = kwargs
-        self.stop_now = False
-
-        def _callback(val):
-            if self.stop_now:
-                raise ValueError
-            self.q.put(val)
-
-        def gentask():
-            try:
-                ret = self.mfunc(callback=_callback, **self.kwargs)
-            except ValueError:
-                pass
-            except:
-                traceback.print_exc()
-                pass
-
-            self.q.put(self.sentinel)
-            if self.c_callback:
-                self.c_callback(ret)
-
-        self.thread = Thread(target=gentask)
-        self.thread.start()
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        obj = self.q.get(True, None)
-        if obj is self.sentinel:
-            raise StopIteration
-        else:
-            return obj
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.stop_now = True
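
For context, the sketch below shows how helpers like the deleted `Iteratorize` and `Stream` are typically combined to stream partial output from a Hugging Face `generate` call, following the text-generation-webui pattern the docstring cites. It is only an illustrative sketch: `stream_generate`, `model`, `tokenizer`, and `input_ids` are assumed names and are not defined anywhere in this repository.

```python
# Minimal sketch, not part of this repo: assumes `model` is an already-loaded
# causal LM and `tokenizer` is its tokenizer (both hypothetical here).
import torch
import transformers

from utils.callbacks import Iteratorize, Stream


def stream_generate(model, tokenizer, input_ids, **generate_kwargs):
    def generate_with_callback(callback=None, **kwargs):
        # Stream never stops generation; it only forwards each partial
        # sequence of token ids to `callback` after every decoding step.
        stopping = kwargs.setdefault(
            "stopping_criteria", transformers.StoppingCriteriaList()
        )
        stopping.append(Stream(callback_func=callback))
        with torch.no_grad():
            model.generate(input_ids=input_ids, **kwargs)

    # Iteratorize runs generate_with_callback in a background thread and
    # turns its callback pushes into a lazy iterator of token-id tensors.
    with Iteratorize(generate_with_callback, generate_kwargs) as generator:
        for output_ids in generator:
            # Decode the tokens produced so far and yield the partial text.
            yield tokenizer.decode(output_ids, skip_special_tokens=True)
```

Exiting the `Iteratorize` context sets `stop_now`, so the background generation thread raises on its next callback and shuts down.
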
utils/prompter.py DELETED
@@ -1,51 +0,0 @@
-"""
-A dedicated helper to manage templates and prompt building.
-"""
-
-import json
-import os.path as osp
-from typing import Union
-
-
-class Prompter(object):
-    __slots__ = ("template", "_verbose")
-
-    def __init__(self, template_name: str = "", verbose: bool = False):
-        self._verbose = verbose
-        if not template_name:
-            # Enforce the default here, so the constructor can be called with '' and will not break.
-            template_name = "alpaca"
-        file_name = osp.join("templates", f"{template_name}.json")
-        if not osp.exists(file_name):
-            raise ValueError(f"Can't read {file_name}")
-        with open(file_name) as fp:
-            self.template = json.load(fp)
-        if self._verbose:
-            print(
-                f"Using prompt template {template_name}: {self.template['description']}"
-            )
-
-    def generate_prompt(
-        self,
-        instruction: str,
-        input: Union[None, str] = None,
-        label: Union[None, str] = None,
-    ) -> str:
-        # returns the full prompt from instruction and optional input
-        # if a label (=response, =output) is provided, it's also appended.
-        if input:
-            res = self.template["prompt_input"].format(
-                instruction=instruction, input=input
-            )
-        else:
-            res = self.template["prompt_no_input"].format(
-                instruction=instruction
-            )
-        if label:
-            res = f"{res}{label}"
-        if self._verbose:
-            print(res)
-        return res
-
-    def get_response(self, output: str) -> str:
-        return output.split(self.template["response_split"])[1].strip()
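
For context, a minimal sketch of how the deleted `Prompter` is typically driven. It assumes a `templates/alpaca.json` file with `prompt_input`, `prompt_no_input`, and `response_split` fields; that template lives outside this diff, so the concrete strings below are illustrative only.

```python
# Minimal sketch, not part of this repo: relies on templates/alpaca.json,
# which is not shown in this diff.
from utils.prompter import Prompter

prompter = Prompter("alpaca", verbose=True)

# Build the full prompt from an instruction plus an optional input field.
prompt = prompter.generate_prompt(
    instruction="Translate the sentence to French.",
    input="Good morning.",
)

# After generation, pass the decoded model output back through the prompter
# to keep only the text after the template's response marker.
decoded_output = prompt + "Bonjour."  # stand-in for tokenizer.decode(...)
response = prompter.get_response(decoded_output)
print(response)  # -> "Bonjour." with the standard alpaca template
```
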