"""
Utility module for caching LLM responses
"""
import os
import json
import hashlib
from typing import Any, Callable, Dict, Optional
from config import CACHE_ENABLED, DEBUG_MODE

# Cache directory (created eagerly at import time)
CACHE_DIR = "cache"
os.makedirs(CACHE_DIR, exist_ok=True)

def generate_cache_key(prompt_type: str, params: Dict[str, Any]) -> str:
    """
    Generate a cache key based on prompt type and parameters
    
    Args:
        prompt_type: Prompt type, such as 'decompose' or 'explain'
        params: Prompt parameters
        
    Returns:
        Cache key string
    """
    # Convert parameters to a standardized JSON string
    params_str = json.dumps(params, sort_keys=True, ensure_ascii=False)
    
    # Calculate hash value (MD5 here is for key derivation, not security)
    hash_obj = hashlib.md5(f"{prompt_type}:{params_str}".encode('utf-8'))
    return hash_obj.hexdigest()
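
# Usage sketch: params are serialized with sort_keys=True, so the key is
# stable under dict-key ordering, e.g.
#   generate_cache_key("explain", {"a": 1, "b": 2})
#     == generate_cache_key("explain", {"b": 2, "a": 1})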

def save_to_cache(cache_key: str, data: Dict[str, Any]) -> None:
    """
    Save JSON-serializable data to a cache file

    Args:
        cache_key: Cache key (already hashed, e.g. from generate_cache_key)
        data: Data to be cached
    """
    cache_path = os.path.join(CACHE_DIR, f"{cache_key}.json")

    with open(cache_path, 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

def load_from_cache(cache_key: str) -> Optional[Dict[str, Any]]:
    """
    Load data from cache
    
    Args:
        cache_key: Cache key
        
    Returns:
        Cached data, or None if it doesn't exist or cannot be parsed
    """
    cache_path = os.path.join(CACHE_DIR, f"{cache_key}.json")
    
    if not os.path.exists(cache_path):
        return None
    
    try:
        with open(cache_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (json.JSONDecodeError, OSError):
        # Return None if the file is corrupted or cannot be read
        return None
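
# Round-trip sketch: after save_to_cache(key, {"x": 1}), a later
# load_from_cache(key) returns {"x": 1}; a missing or corrupted cache
# file yields None rather than raising.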

def cached_llm_call(
    prompt_type: str,
    params: Dict[str, Any],
    call_function: Callable[[Dict[str, Any]], Dict[str, Any]],
) -> Dict[str, Any]:
    """
    Cache wrapper for LLM calls: return the cached response when one
    exists, otherwise call the LLM and cache its result

    Args:
        prompt_type: Prompt type
        params: Prompt parameters
        call_function: Actual function that calls the LLM

    Returns:
        LLM response or cached response
    """
    # Bypass the cache entirely when caching is disabled
    if not CACHE_ENABLED:
        return call_function(params)

    # Generate cache key
    cache_key = generate_cache_key(prompt_type, params)

    # Try to load from cache; compare against None so an empty but
    # valid cached response still counts as a hit
    cached_result = load_from_cache(cache_key)
    if cached_result is not None:
        print(f"[Cache] Using cached response: {prompt_type}")
        return cached_result

    # Cache miss: call the LLM
    result = call_function(params)

    # Save to cache
    save_to_cache(cache_key, result)

    return result

def get_from_cache(cache_key: str) -> Optional[str]:
    """
    Get a raw string directly from the cache. String-based counterpart of
    save_string_to_cache below: unlike load_from_cache, the key is hashed
    here and the file contents are returned verbatim.

    Args:
        cache_key: Raw (un-hashed) cache key string

    Returns:
        Cached string, or None if it doesn't exist or cannot be read
    """
    if not CACHE_ENABLED:
        return None

    # Hash the cache key
    hash_obj = hashlib.md5(cache_key.encode('utf-8'))
    hashed_key = hash_obj.hexdigest()

    cache_path = os.path.join(CACHE_DIR, f"{hashed_key}.json")

    if not os.path.exists(cache_path):
        return None

    try:
        with open(cache_path, 'r', encoding='utf-8') as f:
            if DEBUG_MODE:
                print(f"Loading from cache: {cache_key[:30]}...")
            return f.read()
    except OSError:
        # Return None if the file cannot be read
        return None

def save_string_to_cache(cache_key: str, data: str) -> None:
    """
    Save a raw string to a cache file. String-based counterpart of
    get_from_cache: the key is hashed here and the payload is written
    verbatim. Named distinctly from the JSON-based save_to_cache above
    so the two definitions do not shadow each other.

    Args:
        cache_key: Raw (un-hashed) cache key string
        data: Data string to cache
    """
    if not CACHE_ENABLED:
        return

    # Hash the cache key
    hash_obj = hashlib.md5(cache_key.encode('utf-8'))
    hashed_key = hash_obj.hexdigest()

    cache_path = os.path.join(CACHE_DIR, f"{hashed_key}.json")

    with open(cache_path, 'w', encoding='utf-8') as f:
        f.write(data)
    if DEBUG_MODE:
        print(f"Data cached: {cache_key[:30]}...")
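
# Minimal usage sketch, not part of the module API. Assumes a config
# module providing CACHE_ENABLED/DEBUG_MODE (imported above);
# `fake_llm_call` is a hypothetical stand-in for a real LLM client.
# With CACHE_ENABLED set, the first run prints "[Demo] calling LLM..."
# and repeat runs are served from the cache/ directory.
if __name__ == "__main__":
    def fake_llm_call(params: Dict[str, Any]) -> Dict[str, Any]:
        print("[Demo] calling LLM...")
        return {"answer": f"explained: {params['text']}"}

    response = cached_llm_call("explain", {"text": "what is caching?"}, fake_llm_call)
    print(response)

    # String-based API: raw keys are hashed internally
    save_string_to_cache("explain:v1", json.dumps(response, ensure_ascii=False))
    print(get_from_cache("explain:v1"))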