<?php

namespace dokuwiki\plugin\aichat;

use dokuwiki\Extension\PluginInterface;
use dokuwiki\plugin\aichat\Model\AbstractModel;
use dokuwiki\plugin\aichat\Storage\AbstractStorage;
use dokuwiki\Search\Indexer;
use splitbrain\phpcli\CLI;
use TikToken\Encoder;
use Vanderlee\Sentence\Sentence;

/**
 * Manage the embeddings index
 *
 * Pages are split into chunks of 1000 tokens each. For each chunk the embedding vector is fetched from
 * OpenAI and stored in the Storage backend.
 */
class Embeddings
{
    /** @var int maximum overlap between chunks in tokens */
    public const MAX_OVERLAP_LEN = 200;

    /** @var AbstractModel the model used to create embedding vectors */
    protected $model;
    /** @var CLI|null optional CLI logger for progress and error reporting */
    protected $logger;
    /** @var Encoder lazily instantiated token encoder, access via getTokenEncoder() */
    protected $tokenEncoder;
    /** @var AbstractStorage backend the chunks and their vectors are persisted in */
    protected $storage;
    /** @var array remember sentences when chunking, used to create overlap between chunks */
    private $sentenceQueue = [];

    /**
     * @param AbstractModel $model The model used to create the embedding vectors
     * @param AbstractStorage $storage The storage backend for the created chunks
     */
    public function __construct(AbstractModel $model, AbstractStorage $storage)
    {
        $this->model = $model;
        $this->storage = $storage;
    }

    /**
     * Access storage
     *
     * @return AbstractStorage
     */
    public function getStorage()
    {
        return $this->storage;
    }

    /**
     * Add a logger instance
     *
     * @param CLI $logger
     * @return void
     */
    public function setLogger(CLI $logger)
    {
        $this->logger = $logger;
    }

    /**
     * Get the token encoder instance
     *
     * The encoder is created on first access and reused afterwards.
     *
     * @return Encoder
     */
    public function getTokenEncoder()
    {
        if (!$this->tokenEncoder instanceof Encoder) {
            $this->tokenEncoder = new Encoder();
        }
        return $this->tokenEncoder;
    }

    /**
     * Update the embeddings storage
     *
     * Iterates over all indexed pages. Pages that no longer qualify (deleted, hidden, tiny or
     * matching the skip expression) have their chunks removed. Unchanged pages reuse their
     * existing chunks, modified pages are re-chunked and re-embedded.
     *
     * @param string $skipRE Regular expression to filter out pages (full RE with delimiters)
     * @param bool $clear Should any existing storage be cleared before updating?
     * @return void
     * @throws \Exception
     */
    public function createNewIndex($skipRE = '', $clear = false)
    {
        $indexer = new Indexer();
        $pages = $indexer->getPages();

        $this->storage->startCreation($clear);
        foreach ($pages as $pid => $page) {
            $chunkID = $pid * 100; // chunk IDs start at page ID * 100

            if (
                !page_exists($page) ||
                isHiddenPage($page) ||
                filesize(wikiFN($page)) < 150 || // skip very small pages
                ($skipRE && preg_match($skipRE, $page))
            ) {
                // this page should not be in the index (anymore)
                $this->storage->deletePageChunks($page, $chunkID);
                continue;
            }

            $firstChunk = $this->storage->getChunk($chunkID);
            if ($firstChunk && @filemtime(wikiFN($page)) < $firstChunk->getCreated()) {
                // page is older than the chunks we have, reuse the existing chunks
                $this->storage->reusePageChunks($page, $chunkID);
                if ($this->logger instanceof CLI) $this->logger->info("Reusing chunks for $page");
            } else {
                // page is newer than the chunks we have, create new chunks
                $this->storage->deletePageChunks($page, $chunkID);
                $this->storage->addPageChunks($this->createPageChunks($page, $chunkID));
            }
        }
        $this->storage->finalizeCreation();
    }

    /**
     * Split the given page, fetch embedding vectors and return Chunks
     *
     * Will use the text renderer plugin if available to get the rendered text.
     * Otherwise the raw wiki text is used.
     *
     * Chunks for which the embedding request fails are logged and skipped.
     *
     * @param string $page Name of the page to split
     * @param int $firstChunkID The ID of the first chunk of this page
     * @return Chunk[] A list of chunks created for this page
     * @throws \Exception
     */
    protected function createPageChunks($page, $firstChunkID)
    {
        $chunkList = [];

        $textRenderer = plugin_load('renderer', 'text');
        if ($textRenderer instanceof PluginInterface) {
            global $ID;
            // FIX: remember and restore the global $ID so rendering does not leak
            // page context to the caller
            $oldID = $ID;
            $ID = $page;
            $text = p_cached_output(wikiFN($page), 'text', $page);
            $ID = $oldID;
        } else {
            $text = rawWiki($page);
        }

        $parts = $this->splitIntoChunks($text);
        foreach ($parts as $part) {
            if (trim($part) == '') continue; // skip empty chunks

            try {
                $embedding = $this->model->getEmbedding($part);
            } catch (\Exception $e) {
                // best effort: log and continue with the remaining chunks
                if ($this->logger instanceof CLI) {
                    $this->logger->error(
                        'Failed to get embedding for chunk of page {page}: {msg}',
                        ['page' => $page, 'msg' => $e->getMessage()]
                    );
                }
                continue;
            }
            $chunkList[] = new Chunk($page, $firstChunkID, $part, $embedding);
            $firstChunkID++;
        }
        if ($this->logger instanceof CLI) {
            if ($chunkList !== []) {
                $this->logger->success('{id} split into {count} chunks', ['id' => $page, 'count' => count($chunkList)]);
            } else {
                $this->logger->warning('{id} could not be split into chunks', ['id' => $page]);
            }
        }
        return $chunkList;
    }

    /**
     * Do a nearest neighbor search for chunks similar to the given question
     *
     * Returns only chunks the current user is allowed to read, may return an empty result.
     * The number of returned chunks depends on the model's maximum context token length.
     *
     * @param string $query The question
     * @param string $lang Limit results to this language
     * @return Chunk[]
     * @throws \Exception
     */
    public function getSimilarChunks($query, $lang = '')
    {
        global $auth;
        $vector = $this->model->getEmbedding($query);

        $fetch = ceil(
            ($this->model->getMaxContextTokenLength() / $this->model->getMaxEmbeddingTokenLength())
            * 1.5 // fetch a few more than needed, since not all chunks are maximum length
        );

        $time = microtime(true);
        $chunks = $this->storage->getSimilarChunks($vector, $lang, $fetch);
        if ($this->logger instanceof CLI) {
            $this->logger->info(
                'Fetched {count} similar chunks from store in {time} seconds',
                ['count' => count($chunks), 'time' => round(microtime(true) - $time, 2)]
            );
        }

        $size = 0;
        $result = [];
        foreach ($chunks as $chunk) {
            // filter out chunks the user is not allowed to read
            if ($auth && auth_quickaclcheck($chunk->getPage()) < AUTH_READ) continue;

            $chunkSize = count($this->getTokenEncoder()->encode($chunk->getText()));
            if ($size + $chunkSize > $this->model->getMaxContextTokenLength()) break; // we have enough

            $result[] = $chunk;
            $size += $chunkSize;
        }
        return $result;
    }


    /**
     * Split the given text into chunks that fit the model's embedding token limit
     *
     * Chunks are built from whole sentences. When a chunk is full, the next chunk is
     * started with up to MAX_OVERLAP_LEN tokens of trailing sentences from the previous
     * chunk to provide context overlap.
     *
     * @param string $text
     * @return array
     * @throws \Exception
     * @todo support splitting too long sentences
     */
    public function splitIntoChunks($text)
    {
        $sentenceSplitter = new Sentence();
        $tiktok = $this->getTokenEncoder();

        $chunks = [];
        $sentences = $sentenceSplitter->split($text);

        // FIX: reset the overlap queue so sentences from a previously split text
        // cannot leak into the first chunks of this one
        $this->sentenceQueue = [];

        $chunklen = 0;
        $chunk = '';
        // FIX: compare against null explicitly, a falsy sentence like "0" must not end the loop
        while (($sentence = array_shift($sentences)) !== null) {
            $slen = count($tiktok->encode($sentence));
            if ($slen > $this->model->getMaxEmbeddingTokenLength()) {
                // sentence is too long, we need to split it further
                if ($this->logger instanceof CLI) $this->logger->warning('Sentence too long, splitting not implemented yet');
                continue;
            }

            if ($chunklen + $slen < $this->model->getMaxEmbeddingTokenLength()) {
                // add to current chunk
                $chunk .= $sentence;
                $chunklen += $slen;
                // remember sentence for overlap check
                $this->rememberSentence($sentence);
            } else {
                // add current chunk to result
                $chunks[] = $chunk;

                // start new chunk with remembered sentences
                $chunk = implode(' ', $this->sentenceQueue);
                $chunk .= $sentence;
                $chunklen = count($tiktok->encode($chunk));
            }
        }
        $chunks[] = $chunk;

        return $chunks;
    }

    /**
     * Add a sentence to the queue of remembered sentences
     *
     * The queue is trimmed from the front so its token count stays below MAX_OVERLAP_LEN.
     *
     * @param string $sentence
     * @return void
     */
    protected function rememberSentence($sentence)
    {
        // add sentence to queue
        $this->sentenceQueue[] = $sentence;

        // remove oldest sentences from queue until we are below the max overlap
        $encoder = $this->getTokenEncoder();
        while (count($encoder->encode(implode(' ', $this->sentenceQueue))) > self::MAX_OVERLAP_LEN) {
            array_shift($this->sentenceQueue);
        }
    }
}