<?php

namespace dokuwiki\plugin\aichat;

use dokuwiki\Extension\PluginInterface;
use dokuwiki\plugin\aichat\Model\ChatInterface;
use dokuwiki\plugin\aichat\Model\EmbeddingInterface;
use dokuwiki\plugin\aichat\Storage\AbstractStorage;
use dokuwiki\Search\Indexer;
use splitbrain\phpcli\CLI;
use TikToken\Encoder;
use Vanderlee\Sentence\Sentence;

/**
 * Manage the embeddings index
 *
 * Pages are split into chunks of up to getChunkSize() tokens each. For each chunk the embedding
 * vector is fetched from the configured embedding model and stored in the Storage backend.
 */
class Embeddings
{
    /** @var int maximum overlap between chunks in tokens */
    final public const MAX_OVERLAP_LEN = 200;

    /** @var ChatInterface the chat model, used only for its max input token length here */
    protected $chatModel;

    /** @var EmbeddingInterface the model that turns text into embedding vectors */
    protected $embedModel;

    /** @var CLI|null optional logger; only used when set via setLogger() */
    protected $logger;

    /** @var Encoder token encoder, lazily created — access through getTokenEncoder() */
    protected $tokenEncoder;

    /** @var AbstractStorage backend the chunks and vectors are persisted in */
    protected $storage;

    /** @var string[] most recent sentences, kept to create overlap between neighboring chunks */
    private $sentenceQueue = [];

    /** @var int configured maximum chunk size in tokens ('chunkSize' config key) */
    protected $configChunkSize;

    /** @var int configured number of context chunks to fetch ('contextChunks' config key) */
    protected $configContextChunks;

    /**
     * Embeddings constructor.
     *
     * @param ChatInterface $chatModel
     * @param EmbeddingInterface $embedModel
     * @param AbstractStorage $storage
     * @param array $config The plugin configuration; must contain 'chunkSize' and 'contextChunks'
     */
    public function __construct(
        ChatInterface $chatModel,
        EmbeddingInterface $embedModel,
        AbstractStorage $storage,
        $config
    ) {
        $this->chatModel = $chatModel;
        $this->embedModel = $embedModel;
        $this->storage = $storage;
        $this->configChunkSize = $config['chunkSize'];
        $this->configContextChunks = $config['contextChunks'];
    }

    /**
     * Access storage
     *
     * @return AbstractStorage
     */
    public function getStorage()
    {
        return $this->storage;
    }

    /**
     * Add a logger instance
     *
     * @return void
     */
    public function setLogger(CLI $logger)
    {
        $this->logger = $logger;
    }

    /**
     * Get the token encoder instance (created on first use)
     *
     * @return Encoder
     */
    public function getTokenEncoder()
    {
        if (!$this->tokenEncoder instanceof Encoder) {
            $this->tokenEncoder = new Encoder();
        }
        return $this->tokenEncoder;
    }

    /**
     * Return the chunk size to use
     *
     * The smallest of: a quarter of the chat model's input window (so 4 chunks of context fit),
     * 90% of the embedding model's input window (safety margin), and the configured chunk size.
     *
     * @return int
     */
    public function getChunkSize()
    {
        return min(
            floor($this->chatModel->getMaxInputTokenLength() / 4), // be able to fit 4 chunks into the max input
            floor($this->embedModel->getMaxInputTokenLength() * 0.9), // only use 90% of the embedding model to be safe
            $this->configChunkSize, // this is usually the smallest
        );
    }

    /**
     * Update the embeddings storage
     *
     * Iterates all indexed pages. Pages that no longer qualify (missing, hidden, tiny, filtered
     * by the regexes) have their chunks removed. Unchanged pages have their existing chunks
     * reused; changed pages are re-chunked and re-embedded.
     *
     * @param string $skipRE Regular expression to filter out pages (full RE with delimiters)
     * @param string $matchRE Regular expression pages have to match to be included (full RE with delimiters)
     * @param bool $clear Should any existing storage be cleared before updating?
     * @return void
     * @throws \Exception
     */
    public function createNewIndex($skipRE = '', $matchRE = '', $clear = false)
    {
        $indexer = new Indexer();
        $pages = $indexer->getPages();

        $this->storage->startCreation($clear);
        foreach ($pages as $pid => $page) {
            $chunkID = $pid * 100; // chunk IDs start at page ID * 100

            // NOTE(review): $skipRE is matched against the bare page ID while $matchRE is matched
            // against ":$page" (absolute form) — confirm this asymmetry is intentional
            if (
                !page_exists($page) ||
                isHiddenPage($page) ||
                filesize(wikiFN($page)) < 150 || // skip very small pages
                ($skipRE && preg_match($skipRE, (string)$page)) ||
                ($matchRE && !preg_match($matchRE, ":$page"))
            ) {
                // this page should not be in the index (anymore)
                $this->storage->deletePageChunks($page, $chunkID);
                continue;
            }

            $firstChunk = $this->storage->getChunk($chunkID);
            if ($firstChunk && @filemtime(wikiFN($page)) < $firstChunk->getCreated()) {
                // page is older than the chunks we have, reuse the existing chunks
                $this->storage->reusePageChunks($page, $chunkID);
                if ($this->logger instanceof CLI) {
                    $this->logger->info("Reusing chunks for $page");
                }
            } else {
                // page is newer than the chunks we have, create new chunks
                $this->storage->deletePageChunks($page, $chunkID);
                $chunks = $this->createPageChunks($page, $chunkID);
                if ($chunks) {
                    $this->storage->addPageChunks($chunks);
                }
            }
        }
        $this->storage->finalizeCreation();
    }

    /**
     * Split the given page, fetch embedding vectors and return Chunks
     *
     * Will use the text renderer plugin if available to get the rendered text.
     * Otherwise the raw wiki text is used.
     *
     * @param string $page Name of the page to split
     * @param int $firstChunkID The ID of the first chunk of this page
     * @return Chunk[] A list of chunks created for this page
     * @throws \Exception
     */
    protected function createPageChunks($page, $firstChunkID)
    {
        $chunkList = [];

        $textRenderer = plugin_load('renderer', 'text');
        if ($textRenderer instanceof PluginInterface) {
            // NOTE(review): $ID is set for the renderer but not restored afterwards — confirm callers don't rely on it
            global $ID;
            $ID = $page;
            $text = p_cached_output(wikiFN($page), 'text', $page);
        } else {
            $text = rawWiki($page);
        }

        $parts = $this->splitIntoChunks($text);
        foreach ($parts as $part) {
            if (trim((string)$part) === '') continue; // skip empty chunks

            try {
                $embedding = $this->embedModel->getEmbedding($part);
            } catch (\Exception $e) {
                // a failed chunk is skipped, the rest of the page is still indexed
                if ($this->logger instanceof CLI) {
                    $this->logger->error(
                        'Failed to get embedding for chunk of page {page}: {msg}',
                        ['page' => $page, 'msg' => $e->getMessage()]
                    );
                }
                continue;
            }
            $chunkList[] = new Chunk($page, $firstChunkID, $part, $embedding);
            $firstChunkID++;
        }
        if ($this->logger instanceof CLI) {
            if ($chunkList !== []) {
                $this->logger->success(
                    '{id} split into {count} chunks',
                    ['id' => $page, 'count' => count($chunkList)]
                );
            } else {
                $this->logger->warning('{id} could not be split into chunks', ['id' => $page]);
            }
        }
        return $chunkList;
    }

    /**
     * Do a nearest neighbor search for chunks similar to the given question
     *
     * Returns only chunks the current user is allowed to read, may return an empty result.
     * Results are truncated so their combined token count fits the chat model's input window.
     *
     * @param string $query The question
     * @param string $lang Limit results to this language
     * @return Chunk[]
     * @throws \Exception
     */
    public function getSimilarChunks($query, $lang = '')
    {
        global $auth;
        $vector = $this->embedModel->getEmbedding($query);

        // NOTE(review): the division may yield a float — confirm storage backends accept a
        // non-integer limit here
        $fetch = min(
            ($this->chatModel->getMaxInputTokenLength() / $this->getChunkSize()),
            $this->configContextChunks
        );

        $time = microtime(true);
        $chunks = $this->storage->getSimilarChunks($vector, $lang, $fetch);
        if ($this->logger instanceof CLI) {
            $this->logger->info(
                'Fetched {count} similar chunks from store in {time} seconds',
                ['count' => count($chunks), 'time' => round(microtime(true) - $time, 2)]
            );
        }

        $size = 0;
        $result = [];
        foreach ($chunks as $chunk) {
            // filter out chunks the user is not allowed to read
            if ($auth && auth_quickaclcheck($chunk->getPage()) < AUTH_READ) continue;

            $chunkSize = count($this->getTokenEncoder()->encode($chunk->getText()));
            if ($size + $chunkSize > $this->chatModel->getMaxInputTokenLength()) break; // we have enough

            $result[] = $chunk;
            $size += $chunkSize;
        }
        return $result;
    }

    /**
     * Split the given text into chunks of at most getChunkSize() tokens
     *
     * Chunks are built sentence by sentence; when a chunk is full, the next chunk is seeded
     * with up to MAX_OVERLAP_LEN tokens of the most recent sentences for context overlap.
     * Sentences that alone exceed the chunk size are dropped with a warning.
     *
     * @param string $text
     * @return string[]
     * @throws \Exception
     * @todo support splitting too long sentences
     */
    public function splitIntoChunks($text)
    {
        $sentenceSplitter = new Sentence();
        $tiktok = $this->getTokenEncoder();
        $maxChunkLen = $this->getChunkSize(); // loop invariant, avoid recomputing per sentence

        $chunks = [];
        $sentences = $sentenceSplitter->split($text);

        $chunklen = 0;
        $chunk = '';
        // explicit null check: array_shift() returns null only on an empty array, so a falsy
        // sentence like "0" no longer terminates the loop early
        while (($sentence = array_shift($sentences)) !== null) {
            $slen = count($tiktok->encode($sentence));
            if ($slen > $maxChunkLen) {
                // sentence is too long, we need to split it further
                if ($this->logger instanceof CLI) {
                    $this->logger->warning('Sentence too long, splitting not implemented yet');
                }
                continue;
            }

            if ($chunklen + $slen < $maxChunkLen) {
                // add to current chunk
                $chunk .= $sentence;
                $chunklen += $slen;
                // remember sentence for overlap check
                $this->rememberSentence($sentence);
            } else {
                // current chunk is full, add it to the result
                $chunks[] = $chunk;

                // start new chunk seeded with the remembered sentences as overlap
                // NOTE(review): the sentence opening the new chunk is not added to the overlap
                // queue — matches existing behavior, confirm if intended
                $chunk = implode(' ', $this->sentenceQueue);
                $chunk .= $sentence;
                $chunklen = count($tiktok->encode($chunk));
            }
        }
        // don't emit a trailing empty chunk for empty or all-too-long input
        if ($chunk !== '') {
            $chunks[] = $chunk;
        }

        return $chunks;
    }

    /**
     * Add a sentence to the queue of remembered sentences
     *
     * The queue is trimmed from the front until its joined token count is within MAX_OVERLAP_LEN.
     *
     * @param string $sentence
     * @return void
     */
    protected function rememberSentence($sentence)
    {
        // add sentence to queue
        $this->sentenceQueue[] = $sentence;

        // remove oldest sentences from queue until we are below the max overlap
        $encoder = $this->getTokenEncoder();
        while (count($encoder->encode(implode(' ', $this->sentenceQueue))) > self::MAX_OVERLAP_LEN) {
            array_shift($this->sentenceQueue);
        }
    }
}