//! headless_lms_server/programs/seed/seed_application_task_llms.rs

use headless_lms_models::{
    application_task_default_language_models::{
        self, ApplicationTask, ApplicationTaskDefaultLanguageModel,
    },
    chatbot_configurations_models::{self, NewChatbotConfigurationModel},
};

use crate::prelude::*;
9
10pub struct SeedApplicationLLMsResult {
11    pub llm_default_model_id: Uuid,
12    pub llm_default_model_thinking: bool,
13}
14
15pub async fn seed_application_task_llms(
16    db_pool: PgPool,
17) -> anyhow::Result<SeedApplicationLLMsResult> {
18    let mut conn = db_pool.acquire().await?;
19
20    let llm = chatbot_configurations_models::insert(
21        &mut conn,
22        NewChatbotConfigurationModel {
23            id: Uuid::parse_str("f14d70bd-c228-4447-bddd-4f6f66705356")?,
24            model: "mock-gpt".to_string(),
25            thinking: false,
26            default_model: true,
27            deployment_name: "mock-gpt".to_string(),
28            context_size: 10000,
29        },
30    )
31    .await?;
32
33    application_task_default_language_models::insert(
34        &mut conn,
35        ApplicationTaskDefaultLanguageModel {
36            model_id: llm.id,
37            task: ApplicationTask::ContentCleaning,
38            context_utilization: 0.75,
39            ..Default::default()
40        },
41    )
42    .await?;
43
44    application_task_default_language_models::insert(
45        &mut conn,
46        ApplicationTaskDefaultLanguageModel {
47            model_id: llm.id,
48            task: ApplicationTask::MessageSuggestion,
49            context_utilization: 0.75,
50            ..Default::default()
51        },
52    )
53    .await?;
54
55    Ok(SeedApplicationLLMsResult {
56        llm_default_model_id: llm.id,
57        llm_default_model_thinking: llm.thinking,
58    })
59}