xref: /plugin/aichat/Embeddings.php (revision 883057195a47ebf6c0d68d209e87735466d25f89)
18817535bSAndreas Gohr<?php
28817535bSAndreas Gohr
38817535bSAndreas Gohrnamespace dokuwiki\plugin\aichat;
48817535bSAndreas Gohr
57ee8b02dSAndreas Gohruse dokuwiki\plugin\aichat\backend\AbstractStorage;
67ee8b02dSAndreas Gohruse dokuwiki\plugin\aichat\backend\Chunk;
77ee8b02dSAndreas Gohruse dokuwiki\plugin\aichat\backend\KDTreeStorage;
87ee8b02dSAndreas Gohruse dokuwiki\plugin\aichat\backend\SQLiteStorage;
98817535bSAndreas Gohruse dokuwiki\Search\Indexer;
10ad38c5fdSAndreas Gohruse Hexogen\KDTree\Exception\ValidationException;
112ecc089aSAndreas Gohruse splitbrain\phpcli\CLI;
128817535bSAndreas Gohruse TikToken\Encoder;
138817535bSAndreas Gohruse Vanderlee\Sentence\Sentence;
148817535bSAndreas Gohr
159da5f0dfSAndreas Gohr/**
169da5f0dfSAndreas Gohr * Manage the embeddings index
179da5f0dfSAndreas Gohr *
189da5f0dfSAndreas Gohr * Pages are split into chunks of 1000 tokens each. For each chunk the embedding vector is fetched from
197ee8b02dSAndreas Gohr * OpenAI and stored in the Storage backend.
209da5f0dfSAndreas Gohr */
class Embeddings
{
    /** Maximum number of tokens a single chunk may contain */
    const MAX_TOKEN_LEN = 1000;

    /** @var OpenAI Client used to fetch embedding vectors */
    protected $openAI;
    /** @var CLI|null Optional CLI logger for progress and error reporting */
    protected $logger;

    /** @var AbstractStorage Backend that persists chunks and their vectors */
    protected $storage;

    /**
     * Initialize the embeddings manager
     *
     * @param OpenAI $openAI Client used to create embedding vectors
     */
    public function __construct(OpenAI $openAI)
    {
        $this->openAI = $openAI;
        //$this->storage = new KDTreeStorage(); // FIXME make configurable
        $this->storage = new SQLiteStorage(); // FIXME make configurable
    }

    /**
     * Access storage
     *
     * @return AbstractStorage
     */
    public function getStorage()
    {
        return $this->storage;
    }

    /**
     * Add a logger instance
     *
     * When no logger is set, all progress/error output is silently skipped.
     *
     * @param CLI $logger
     * @return void
     */
    public function setLogger(CLI $logger)
    {
        $this->logger = $logger;
    }

    /**
     * Update the embeddings storage
     *
     * Iterates over all pages known to the fulltext indexer. Pages that no longer exist,
     * are hidden, are very small or match $skipRE are removed from the storage. Pages whose
     * stored chunks are newer than the page file are reused as-is; everything else is
     * re-chunked and re-embedded.
     *
     * @param string $skipRE Regular expression to filter out pages (full RE with delimiters)
     * @param bool $clear Should any existing storage be cleared before updating?
     * @return void
     * @throws \Exception
     */
    public function createNewIndex($skipRE = '', $clear = false)
    {
        $indexer = new Indexer();
        $pages = $indexer->getPages();

        // 1536 is the dimensionality of the embedding vectors (OpenAI ada-002)
        $this->storage->startCreation(1536, $clear);
        foreach ($pages as $pid => $page) {
            $chunkID = $pid * 100; // chunk IDs start at page ID * 100

            if (
                !page_exists($page) ||
                isHiddenPage($page) ||
                filesize(wikiFN($page)) < 150 || // skip very small pages
                ($skipRE && preg_match($skipRE, $page))
            ) {
                // this page should not be in the index (anymore)
                $this->storage->deletePageChunks($page, $chunkID);
                continue;
            }

            $firstChunk = $this->storage->getChunk($chunkID);
            if ($firstChunk && @filemtime(wikiFN($page)) < $firstChunk->getCreated()) {
                // page is older than the chunks we have, reuse the existing chunks
                $this->storage->reusePageChunks($page, $chunkID);
                if ($this->logger) $this->logger->info("Reusing chunks for $page");
            } else {
                // page is newer than the chunks we have, create new chunks
                $this->storage->deletePageChunks($page, $chunkID);
                $this->storage->addPageChunks($this->createPageChunks($page, $chunkID));
            }
        }
        $this->storage->finalizeCreation();
    }

    /**
     * Split the given page, fetch embedding vectors and return Chunks
     *
     * Will use the text renderer plugin if available to get the rendered text.
     * Otherwise the raw wiki text is used.
     *
     * Chunks for which the embedding request fails are logged and skipped, so the
     * returned list may contain fewer entries than the page was split into.
     *
     * @param string $page Name of the page to split
     * @param int $firstChunkID The ID of the first chunk of this page
     * @return Chunk[] A list of chunks created for this page
     * @throws \Exception
     */
    protected function createPageChunks($page, $firstChunkID)
    {
        $chunkList = [];

        $textRenderer = plugin_load('renderer', 'text');
        if ($textRenderer) {
            // NOTE(review): $ID is clobbered here and not restored — presumably needed
            // by p_cached_output(); verify no caller depends on the previous value
            global $ID;
            $ID = $page;
            $text = p_cached_output(wikiFN($page), 'text', $page);
        } else {
            $text = rawWiki($page);
        }

        $parts = $this->splitIntoChunks($text);
        foreach ($parts as $part) {
            try {
                $embedding = $this->openAI->getEmbedding($part);
            } catch (\Exception $e) {
                // best effort: log and continue with the remaining chunks
                if ($this->logger) {
                    $this->logger->error(
                        'Failed to get embedding for chunk of page {page}: {msg}',
                        ['page' => $page, 'msg' => $e->getMessage()]
                    );
                }
                continue;
            }
            $chunkList[] = new Chunk($page, $firstChunkID, $part, $embedding);
            $firstChunkID++;
        }
        if ($this->logger) {
            $this->logger->success('{id} split into {count} chunks', ['id' => $page, 'count' => count($parts)]);
        }
        return $chunkList;
    }

    /**
     * Do a nearest neighbor search for chunks similar to the given question
     *
     * Returns only chunks the current user is allowed to read, may return an empty result.
     * Since ACL filtering happens after fetching, fewer than $limit chunks may be returned.
     *
     * @param string $query The question
     * @param int $limit The number of results to return
     * @return Chunk[]
     * @throws \Exception
     */
    public function getSimilarChunks($query, $limit = 4)
    {
        global $auth;
        $vector = $this->openAI->getEmbedding($query);

        $chunks = $this->storage->getSimilarChunks($vector, $limit);
        $result = [];
        foreach ($chunks as $chunk) {
            // filter out chunks the user is not allowed to read
            if ($auth && auth_quickaclcheck($chunk->getPage()) < AUTH_READ) continue;
            $result[] = $chunk;
            if (count($result) >= $limit) break;
        }
        return $result;
    }

    /**
     * Split the given text into chunks of at most MAX_TOKEN_LEN tokens
     *
     * Sentences are kept intact; a single sentence exceeding MAX_TOKEN_LEN tokens is
     * currently skipped entirely (with a warning).
     *
     * @param string $text The text to split
     * @return string[] The resulting chunks
     * @throws \Exception
     * @todo maybe add overlap support
     * @todo support splitting too long sentences
     */
    public function splitIntoChunks($text)
    {
        $sentenceSplitter = new Sentence();
        $tiktok = new Encoder();

        $chunks = [];
        $sentences = $sentenceSplitter->split($text);

        $chunklen = 0;
        $chunk = '';
        // explicit null check: array_shift() returns null when the list is exhausted,
        // while a falsy sentence like "0" must NOT terminate the loop
        while (($sentence = array_shift($sentences)) !== null) {
            $slen = count($tiktok->encode($sentence));
            if ($slen > self::MAX_TOKEN_LEN) {
                // sentence is too long, we need to split it further
                if ($this->logger) $this->logger->warning('Sentence too long, splitting not implemented yet');
                continue;
            }

            if ($chunklen + $slen < self::MAX_TOKEN_LEN) {
                // add to current chunk
                $chunk .= $sentence;
                $chunklen += $slen;
            } else {
                // current chunk is full, start a new one
                // guard against pushing an empty chunk when the very first sentence
                // already fills a whole chunk on its own
                if ($chunk !== '') {
                    $chunks[] = $chunk;
                }
                $chunk = $sentence;
                $chunklen = $slen;
            }
        }
        // add the remainder, but never an empty chunk (eg. for empty input)
        if ($chunk !== '') {
            $chunks[] = $chunk;
        }

        return $chunks;
    }
}
222