xref: /plugin/dokullm/conf/default.php (revision 590368144294a28ecf0e0e39feb976bf79fefb1e)
<?php
/**
 * Default settings for the dokullm plugin
 *
 * This file defines the default configuration values for the LLM integration plugin.
 * These values can be overridden by the user in the plugin configuration.
 */

/**
 * The API endpoint URL for the LLM service
 *
 * This should be the full URL to the chat completions endpoint of your LLM provider.
 * The default is set to OpenAI's GPT API endpoint.
 *
 * @var string
 */
$conf['api_url'] = 'https://api.openai.com/v1/chat/completions';
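// Example (assumption, not a default): the plugin only needs an OpenAI-compatible
// chat completions endpoint, so a local server such as Ollama should also work:
// $conf['api_url'] = 'http://localhost:11434/v1/chat/completions';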

/**
 * The API authentication key
 *
 * This is the secret key used to authenticate with the LLM service.
 * For security, this should be left empty in the default config and set by the user.
 *
 * @var string
 */
$conf['api_key'] = '';
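// Never put a real key in this defaults file. Sketch of a typical DokuWiki
// override (assuming the standard plugin configuration convention), set via the
// Configuration Manager or placed in conf/local.php:
// $conf['plugin']['dokullm']['api_key'] = 'your-secret-key';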

/**
 * The model identifier to use for text processing
 *
 * Specifies which LLM model to use for processing requests.
 * The default is gpt-3.5-turbo, but it can be changed to other models such as gpt-4.
 *
 * @var string
 */
$conf['model'] = 'gpt-3.5-turbo';
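// Example (assumption): when api_url points at a local server, the model name is
// whatever identifier that server exposes, e.g. a hypothetical Ollama model tag:
// $conf['model'] = 'llama3.1:8b';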

/**
 * The request timeout in seconds
 *
 * Maximum time to wait for a response from the LLM API before timing out.
 * Set to 30 seconds by default, which should be sufficient for most requests.
 *
 * @var int
 */
$conf['timeout'] = 30;
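// Illustrative value (assumption): slow local models or the 'think' option below
// can push responses well past 30 seconds, so a larger timeout may be needed:
// $conf['timeout'] = 120;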

/**
 * The language for prompts
 *
 * Specifies which language to use for the prompts.
 * 'default' uses English prompts, while 'ro' uses Romanian prompts.
 *
 * @var string
 */
$conf['language'] = 'default';

/**
 * The temperature setting for the LLM
 *
 * Controls the randomness of the LLM output. Lower values (0.0-0.5) make the output
 * more deterministic and focused, while higher values (0.5-1.0) make it more random
 * and creative. Default is 0.3 for consistent, high-quality responses.
 *
 * @var float
 */
$conf['temperature'] = 0.3;

/**
 * The top-p (nucleus sampling) setting for the LLM
 *
 * Controls the cumulative probability of token selection. Lower values (0.1-0.5) make
 * the output more focused, while higher values (0.5-1.0) allow for more diverse outputs.
 * Default is 0.8 for a good balance between creativity and coherence.
 *
 * @var float
 */
$conf['top_p'] = 0.8;

/**
 * The top-k setting for the LLM
 *
 * Limits the number of highest probability tokens considered for each step.
 * Lower values (1-10) make the output more focused, while higher values (10-50)
 * allow for more diverse outputs. Default is 20 for balanced diversity.
 *
 * @var int
 */
$conf['top_k'] = 20;

/**
 * The min-p setting for the LLM
 *
 * Sets a minimum probability threshold for token selection. Tokens with probabilities
 * below this threshold are filtered out. Default is 0.0 (no filtering).
 *
 * @var float
 */
$conf['min_p'] = 0.0;
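// Illustrative sampling presets (assumptions, not defaults), consistent with the
// ranges described above. Tighter sampling for near-deterministic edits:
// $conf['temperature'] = 0.1; $conf['top_p'] = 0.5; $conf['top_k'] = 10;
// Looser sampling for more creative rewrites:
// $conf['temperature'] = 0.7; $conf['top_p'] = 0.95; $conf['top_k'] = 50;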

/**
 * Show copy button in the toolbar
 *
 * Controls whether the copy page button is displayed in the LLM toolbar.
 * When true, the copy button will be visible; when false, it will be hidden.
 *
 * @var bool
 */
$conf['show_copy_button'] = true;

/**
 * Replace ID in template content
 *
 * Controls whether the template page ID should be replaced with the new page ID
 * when copying a page with a template. When true, the template ID will be replaced;
 * when false, it will be left as is.
 *
 * @var bool
 */
$conf['replace_id'] = true;

/**
 * Enable thinking in LLM responses
 *
 * Controls whether the LLM should engage in deeper thinking processes before responding.
 * When true, the LLM will use thinking capabilities and may take longer to respond;
 * when false, it will provide direct responses without extended thinking.
 *
 * @var bool
 */
$conf['think'] = false;

/**
 * Enable tool usage in LLM responses
 *
 * Controls whether the LLM can use tools to enhance its responses.
 * When true, the LLM can call tools like get_document, get_template, and get_examples;
 * when false, these tools will not be available to the LLM.
 *
 * @var bool
 */
$conf['use_tools'] = false;
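// Example (assumption): enabling tools presumably requires a model and endpoint
// that support OpenAI-style tool (function) calling; otherwise leave this false:
// $conf['use_tools'] = true;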