use anyhow::Context;
use blake3::Hash;
use git2::{build::RepoBuilder, Cred, FetchOptions, RemoteCallbacks, Repository};
use headless_lms_models::{exercise_repositories, repository_exercises};
use headless_lms_utils::{
    file_store::{self, FileStore},
    folder_checksum, ApplicationConfiguration,
};
use sqlx::{Acquire, PgConnection};
use std::{
    collections::HashMap,
    io::Cursor,
    path::{Path, PathBuf},
};
use tracing::{debug, error, info, warn};
use uuid::Uuid;
use walkdir::{DirEntry, WalkDir};

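/// An exercise stored in an exercise repository, with a direct download URL to its archived files.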
pub struct RepositoryExercise {
    pub url: String,
}

/// Processes an exercise repository, creating a repository exercise for each exercise in it.
/// Each exercise is compressed and uploaded to file storage.
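/// On success the repository is marked as successful; on failure any files uploaded
/// so far are deleted and the repository is marked as failed.
///
/// # Example
/// ```ignore
/// // A minimal sketch; `conn`, `repository_id`, `deploy_key`, `file_store` and `app_conf`
/// // are assumed to be set up by the caller (e.g. by the task that syncs repositories).
/// let exercises = process(
///     &mut conn,
///     repository_id,
///     "git@github.com:example/exercises.git",
///     Some(&deploy_key),
///     &file_store,
///     &app_conf,
/// )
/// .await?;
/// ```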
pub async fn process(
    conn: &mut PgConnection,
    repository_id: Uuid,
    url: &str,
    deploy_key: Option<&str>,
    file_store: &dyn FileStore,
    app_conf: &ApplicationConfiguration,
) -> anyhow::Result<Vec<RepositoryExercise>> {
    let mut stored_files = vec![];
    match process_inner(
        conn,
        repository_id,
        url,
        deploy_key,
        file_store,
        &mut stored_files,
        app_conf,
    )
    .await
    {
        Ok(res) => {
            exercise_repositories::mark_success(conn, repository_id).await?;
            Ok(res)
        }
        Err(err) => {
            if !stored_files.is_empty() {
                warn!("Failed while creating new exercise repository, cleaning files that were uploaded");
                for file in stored_files {
                    if let Err(err) = file_store.delete(&file).await {
                        error!("Failed to clean file {}: {err}", file.display());
                    }
                }
            }
            exercise_repositories::mark_failure(conn, repository_id, &err.to_string()).await?;
            Err(err)
        }
    }
}

// Implements the logic for `process` so that `process` can handle all errors in one place.
async fn process_inner(
    conn: &mut PgConnection,
    repository_id: Uuid,
    url: &str,
    deploy_key: Option<&str>,
    file_store: &dyn FileStore,
    stored_files: &mut Vec<PathBuf>,
    app_conf: &ApplicationConfiguration,
) -> anyhow::Result<Vec<RepositoryExercise>> {
    let mut tx = conn.begin().await?;

    // clone repo to temp dir
    let temp = tempfile::tempdir()?;
    let mut fetch_opts = FetchOptions::new();
    if let Some(deploy_key) = deploy_key {
        let mut remote_cbs = RemoteCallbacks::new();
        remote_cbs.credentials(|_, username, credential_type| {
            if credential_type.is_ssh_memory() {
                Cred::ssh_key_from_memory(username.unwrap_or("git"), None, deploy_key, None)
            } else {
                Err(git2::Error::from_str(
                    "The git server does not support the SSH_MEMORY credential type",
                ))
            }
        });
        fetch_opts.remote_callbacks(remote_cbs);
    }
    RepoBuilder::new()
        .fetch_options(fetch_opts)
        .clone(url, temp.path())?;

    // create exercises in db and store them in file store
    let new_exercises = find_exercise_directories(temp.path()).await?;
    let mut repository_exercises = vec![];
    for ex in &new_exercises {
        let new_exercise_id = Uuid::new_v4();
        let path = create_and_upload_exercise(
            &mut tx,
            repository_id,
            new_exercise_id,
            ex,
            file_store,
            app_conf,
        )
        .await?;
        let url = file_store.get_direct_download_url(&path).await?;
        stored_files.push(path);
        repository_exercises.push(RepositoryExercise { url });
    }

    tx.commit().await?;
    Ok(repository_exercises)
}

/// Updates the given repository using the given URL.
/// Exercises with a known checksum but a changed part or name are updated to reflect the new part or name.
/// Exercises with a known part and name but a changed checksum have their files replaced in the file store and their checksum updated.
/// Errors may leave some exercises updated and others not, since there is no mechanism for rolling back file store updates.
/// However, such inconsistencies are fixed by a successful retry.
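/// On failure, any newly created exercise files that were uploaded during the update are deleted.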
pub async fn update(
    conn: &mut PgConnection,
    repository: Uuid,
    url: &str,
    file_store: &dyn FileStore,
    app_conf: &ApplicationConfiguration,
) -> anyhow::Result<()> {
    let mut new_stored_files = vec![];
    match update_inner(
        conn,
        repository,
        url,
        file_store,
        &mut new_stored_files,
        app_conf,
    )
    .await
    {
        Ok(res) => Ok(res),
        Err(err) => {
            if !new_stored_files.is_empty() {
                debug!("Failed while updating exercise repository, cleaning new exercises that were uploaded");
                for file in new_stored_files {
                    if let Err(err) = file_store.delete(&file).await {
                        error!("Failed to clean file {}: {err}", file.display());
                    }
                }
            }
            Err(err)
        }
    }
}

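// Implements the logic for `update` so that `update` can handle all errors in one place.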
async fn update_inner(
    conn: &mut PgConnection,
    repository: Uuid,
    url: &str,
    file_store: &dyn FileStore,
    new_stored_files: &mut Vec<PathBuf>,
    app_conf: &ApplicationConfiguration,
) -> anyhow::Result<()> {
    let mut tx = conn.begin().await?;

    let temp = tempfile::tempdir()?;
    Repository::clone(url, temp.path())?;

    let repository_exercises = find_exercise_directories(temp.path()).await?;
    let current_exercises = repository_exercises::get_for_repository(&mut tx, repository).await?;

    let mut by_name = HashMap::new();
    let mut by_checksum = HashMap::new();
    for ex in &current_exercises {
        by_name.insert((&ex.part, &ex.name), ex);
        by_checksum.insert(&ex.checksum, ex);
    }
    for ex in repository_exercises {
        if let Some(&current) = by_name.get(&(&ex.part, &ex.name)) {
            // found known exercise by part and name
            if current.checksum != ex.checksum.as_bytes() {
                // checksum changed, update files and checksum
                create_and_upload_exercise(
                    &mut tx, repository, current.id, &ex, file_store, app_conf,
                )
                .await?;
                repository_exercises::update_checksum(&mut tx, current.id, ex.checksum.as_bytes())
                    .await?;
            }
        } else if let Some(&current) = by_checksum.get(&ex.checksum.as_bytes().to_vec()) {
            // found known exercise by checksum
            if current.part != ex.part || current.name != ex.name {
                // part and/or name changed
                repository_exercises::update_part_and_name(&mut tx, current.id, &ex.part, &ex.name)
                    .await?;
            }
        } else {
            // unknown part/name and checksum, assume new exercise
            let path = create_and_upload_exercise(
                &mut tx,
                repository,
                Uuid::new_v4(),
                &ex,
                file_store,
                app_conf,
            )
            .await?;
            new_stored_files.push(path);
        }
    }

    tx.commit().await?;
    Ok(())
}

/// Marks the repository and its exercises as deleted and removes the associated files from the file store.
/// If multiple file removals fail, only the last error is returned.
pub async fn delete(
    conn: &mut PgConnection,
    repository_id: Uuid,
    file_store: &dyn FileStore,
) -> anyhow::Result<()> {
    let mut tx = conn.begin().await?;

    let mut latest_error = None;
    let exercises = repository_exercises::delete_for_repository(&mut tx, repository_id).await?;
    exercise_repositories::delete(&mut tx, repository_id).await?;
    for exercise in exercises {
        let path = file_store::repository_exercise_path(repository_id, exercise);
        if let Err(err) = file_store.delete(&path).await {
            error!(
                "Failed to delete file while deleting repository {}: {err}",
                path.display()
            );
            latest_error = Some(err);
        }
    }

    if let Some(latest_error) = latest_error {
        Err(latest_error.into())
    } else {
        tx.commit().await?;
        Ok(())
    }
}

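/// Archives the exercise directory into a tar archive, compresses it with zstd,
/// uploads the result to the file store and inserts the exercise into the database.
/// Returns the path of the uploaded file in the file store.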
async fn create_and_upload_exercise(
    conn: &mut PgConnection,
    repository: Uuid,
    exercise_id: Uuid,
    exercise: &NewExercise,
    file_store: &dyn FileStore,
    app_conf: &ApplicationConfiguration,
) -> anyhow::Result<PathBuf> {
    // archive the exercise directory into an in-memory tar archive and compress it with zstd
    let cursor = Cursor::new(vec![]);
    let mut tar = tar::Builder::new(cursor);
    tar.append_dir_all(".", &exercise.path)?;
    let mut cursor = tar.into_inner()?;
    // rewind the cursor back to the beginning before reading from it
    cursor.set_position(0);
    let tar_zstd = zstd::encode_all(cursor, 0)?;

    // upload
    let path = file_store::repository_exercise_path(repository, exercise_id);
    file_store
        .upload(&path, tar_zstd, "application/zstd")
        .await?;
    let url = file_store.get_download_url(&path, app_conf);

    // create
    repository_exercises::new(
        conn,
        exercise_id,
        repository,
        &exercise.part,
        &exercise.name,
        exercise.checksum.as_bytes(),
        &url,
    )
    .await?;
    Ok(path)
}

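/// An exercise directory found in a cloned exercise repository, before it has been
/// inserted into the database.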
#[derive(Debug)]
struct NewExercise {
    part: String,
    name: String,
    checksum: Hash,
    path: PathBuf,
}

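/// Walks the cloned repository two directory levels deep and creates a `NewExercise`
/// for each part/exercise directory found, skipping `private` directories, hidden
/// directories, the contents of `.git` and directories containing a `.tmcignore` file.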
async fn find_exercise_directories(clone_path: &Path) -> anyhow::Result<Vec<NewExercise>> {
    info!("finding exercise directories in {}", clone_path.display());

    let mut exercises = vec![];
    // exercises in repositories are in subdirs like
    // part01/01_exercise
    // part01/02_exercise
    // part02/01_exercise
    for entry in WalkDir::new(clone_path)
        .min_depth(2)
        .max_depth(2)
        .into_iter()
        .filter_entry(|e| {
            e.file_name() != "private"
                && !is_hidden_dir(e)
                && !contains_tmcignore(e)
                && !is_in_git_dir(e.path())
        })
    {
        let entry = entry?;
        let checksum = folder_checksum::hash_folder(entry.path()).await?;

        let path = entry.into_path().canonicalize()?;
        let part = path
            .parent()
            .expect("Path should be in a subdirectory")
            .file_name()
            .expect("The parent file name cannot be missing")
            .to_str()
            .context("Invalid directory name in repository")?
            .to_string();
        let name = path
            .file_name()
            .expect("Path should be a file")
            .to_str()
            .context("Invalid directory name in repository")?
            .to_string();
        exercises.push(NewExercise {
            part,
            name,
            checksum,
            path,
        });
    }
    Ok(exercises)
}

// Filter for hidden directories (directories with names starting with '.')
fn is_hidden_dir(entry: &DirEntry) -> bool {
    let skip = entry.metadata().map(|e| e.is_dir()).unwrap_or_default()
        && entry
            .file_name()
            .to_str()
            .map(|s| s.starts_with('.'))
            .unwrap_or_default();
    if skip {
        debug!("is hidden dir: {}", entry.path().display());
    }
    skip
}

// Filter for entries directly inside a .git directory
fn is_in_git_dir(path: &Path) -> bool {
    let skip = path.parent().map(|p| p.ends_with(".git")).unwrap_or(false);
    if skip {
        debug!("is in git dir: {}", path.display());
    }
    skip
}

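// Filter for directories that contain a .tmcignore file in their root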
fn contains_tmcignore(entry: &DirEntry) -> bool {
    for entry in WalkDir::new(entry.path())
        .max_depth(1)
        .into_iter()
        .filter_map(|e| e.ok())
    {
        let is_file = entry.metadata().map(|e| e.is_file()).unwrap_or_default();
        if is_file && entry.file_name() == ".tmcignore" {
            debug!("contains .tmcignore: {}", entry.path().display());
            return true;
        }
    }
    false
}

#[cfg(test)]
mod test {
    use super::*;
    use std::{fs::Permissions, os::unix::prelude::PermissionsExt, str::FromStr};

    #[tokio::test]
    async fn finds_exercise_dirs() {
        let repo = tempfile::tempdir().unwrap();

        std::fs::create_dir_all(repo.path().join("part01/01_exercise")).unwrap();
        std::fs::write(repo.path().join("part01/01_exercise/file"), "1234").unwrap();

        std::fs::create_dir_all(repo.path().join("part01/02_exercise")).unwrap();
        std::fs::write(repo.path().join("part01/02_exercise/file"), "1234").unwrap();

        std::fs::create_dir_all(repo.path().join("part02/01_exercise")).unwrap();
        std::fs::write(repo.path().join("part02/01_exercise/file"), "1234").unwrap();

        // Make sure permissions are the same on all systems. Some systems have different default permissions in the temp folder.
        let file_paths = vec![
            repo.path().join("part01/01_exercise/file"),
            repo.path().join("part01/02_exercise/file"),
            repo.path().join("part02/01_exercise/file"),
        ];
        let folder_paths = vec![
            repo.path().join("part01/01_exercise"),
            repo.path().join("part01/02_exercise"),
            repo.path().join("part02/01_exercise"),
            repo.path().to_path_buf(),
        ];
        for path in file_paths {
            std::fs::set_permissions(path, Permissions::from_mode(0o644)).unwrap();
        }
        for path in folder_paths {
            std::fs::set_permissions(path, Permissions::from_mode(0o755)).unwrap();
        }

        let mut paths = find_exercise_directories(repo.path()).await.unwrap();
        paths.sort_by(|a, b| a.path.cmp(&b.path));
        assert_eq!(paths.len(), 3);

        assert_eq!(&paths[0].path, &repo.path().join("part01/01_exercise"));
        assert_eq!(&paths[0].part, "part01");
        assert_eq!(&paths[0].name, "01_exercise");
        assert_eq!(
            paths[0].checksum,
            Hash::from_str("3a01c5d9a407deec294c4ac561cdeea1a7507464193e06387083853e3ca71c3a")
                .unwrap()
        );

        assert_eq!(&paths[1].name, "02_exercise");
        assert_eq!(&paths[2].name, "01_exercise");
    }

    #[test]
    fn filters_git() {
        assert!(is_in_git_dir(Path::new("something/.git/something")));
        assert!(!is_in_git_dir(Path::new(
            "something/.git/something/something"
        )));
    }
}