// mithril_aggregator/database/repository/immutable_file_digest_repository.rs

1use std::collections::BTreeMap;
2use std::sync::Arc;
3
4use async_trait::async_trait;
5
6use mithril_common::digesters::cache::{
7    CacheProviderResult, ImmutableDigesterCacheGetError, ImmutableDigesterCacheStoreError,
8    ImmutableFileDigestCacheProvider,
9};
10use mithril_common::digesters::ImmutableFile;
11use mithril_common::entities::{HexEncodedDigest, ImmutableFileName};
12use mithril_common::StdResult;
13use mithril_persistence::sqlite::{ConnectionExtensions, SqliteConnection};
14
15use crate::database::query::{
16    DeleteImmutableFileDigestQuery, GetImmutableFileDigestQuery, UpsertImmutableFileDigestQuery,
17};
18use crate::database::record::ImmutableFileDigestRecord;
19use crate::ImmutableFileDigestMapper;
20
21/// ImmutableFileDigestRepository store for the immutable file digests.
22pub struct ImmutableFileDigestRepository {
23    connection: Arc<SqliteConnection>,
24}
25
26impl ImmutableFileDigestRepository {
27    /// Instantiate service
28    pub fn new(connection: Arc<SqliteConnection>) -> Self {
29        Self { connection }
30    }
31
32    /// Return the [ImmutableFileDigestRecord] for the given [ImmutableFileName].
33    pub async fn get_immutable_file_digest(
34        &self,
35        immutable_file_name: &ImmutableFileName,
36    ) -> StdResult<Option<ImmutableFileDigestRecord>> {
37        self.connection
38            .fetch_first(GetImmutableFileDigestQuery::by_immutable_file_name(
39                immutable_file_name,
40            )?)
41    }
42
43    /// Return all the [ImmutableFileDigestRecord]s.
44    pub async fn get_all_immutable_file_digest(&self) -> StdResult<Vec<ImmutableFileDigestRecord>> {
45        self.connection
46            .fetch_collect(GetImmutableFileDigestQuery::all())
47    }
48
49    /// Create a new [ImmutableFileDigestRecord] in the database.
50    pub async fn upsert_immutable_file_digest(
51        &self,
52        immutable_file_name: &ImmutableFileName,
53        digest: &str,
54    ) -> StdResult<ImmutableFileDigestRecord> {
55        let message = self
56            .connection
57            .fetch_first(UpsertImmutableFileDigestQuery::one(
58                immutable_file_name,
59                digest,
60            )?)?;
61
62        message
63            .ok_or_else(|| panic!("Upserting an immutable_file_digest should not return nothing."))
64    }
65
66    /// Delete all [ImmutableFileDigestRecord] from the database.
67    pub async fn delete_all(&self) -> StdResult<()> {
68        self.connection
69            .apply(DeleteImmutableFileDigestQuery::all())?;
70
71        Ok(())
72    }
73}
74
75#[async_trait]
76impl ImmutableFileDigestCacheProvider for ImmutableFileDigestRepository {
77    async fn store(
78        &self,
79        digest_per_filenames: Vec<(ImmutableFileName, HexEncodedDigest)>,
80    ) -> CacheProviderResult<()> {
81        for (filename, digest) in digest_per_filenames {
82            self.upsert_immutable_file_digest(&filename, &digest)
83                .await
84                .map_err(ImmutableDigesterCacheStoreError::StoreError)?;
85        }
86
87        Ok(())
88    }
89
90    async fn get(
91        &self,
92        immutables: Vec<ImmutableFile>,
93    ) -> CacheProviderResult<BTreeMap<ImmutableFile, Option<HexEncodedDigest>>> {
94        let mut result = BTreeMap::new();
95        for immutable in immutables {
96            let immutable_file_digest = self
97                .get_immutable_file_digest(&immutable.filename)
98                .await
99                .map_err(ImmutableDigesterCacheGetError::StoreError)?;
100
101            result.insert(immutable, immutable_file_digest.map(|f| f.digest));
102        }
103
104        Ok(result)
105    }
106
107    async fn reset(&self) -> CacheProviderResult<()> {
108        self.delete_all()
109            .await
110            .map_err(ImmutableDigesterCacheGetError::StoreError)?;
111
112        Ok(())
113    }
114}
115
116#[async_trait]
117impl ImmutableFileDigestMapper for ImmutableFileDigestRepository {
118    async fn get_immutable_file_digest_map(
119        &self,
120    ) -> StdResult<BTreeMap<ImmutableFileName, HexEncodedDigest>> {
121        let immutable_file_digest_map = BTreeMap::from_iter(
122            self.get_all_immutable_file_digest()
123                .await?
124                .into_iter()
125                .map(|record| (record.immutable_file_name, record.digest)),
126        );
127
128        Ok(immutable_file_digest_map)
129    }
130}
131
132#[cfg(test)]
133mod tests {
134
135    use crate::database::test_helper::main_db_connection;
136
137    use super::*;
138
139    async fn get_connection() -> Arc<SqliteConnection> {
140        let connection = main_db_connection().unwrap();
141
142        Arc::new(connection)
143    }
144
    mod repository {
        use mithril_common::test_utils::assert_equivalent;

        use super::*;

        // Round-trip: a digest is absent until upserted, then retrievable by file name.
        #[tokio::test]
        async fn repository_get_immutable_file_digest() {
            let repository = ImmutableFileDigestRepository::new(get_connection().await);
            let immutable_file_name: ImmutableFileName = "123.chunk".to_string();
            let digest = "digest-123";

            let immutable_file_digest_result = repository
                .get_immutable_file_digest(&immutable_file_name)
                .await
                .unwrap();
            assert_eq!(None, immutable_file_digest_result);

            repository
                .upsert_immutable_file_digest(&immutable_file_name, digest)
                .await
                .unwrap();
            let immutable_file_digest_result = repository
                .get_immutable_file_digest(&immutable_file_name)
                .await
                .unwrap();
            assert_eq!(
                Some(ImmutableFileDigestRecord {
                    immutable_file_name,
                    digest: digest.to_string()
                }),
                immutable_file_digest_result
            );
        }

        // Listing returns every upserted record (order-insensitive comparison).
        #[tokio::test]
        async fn repository_get_all_immutable_file_digests() {
            let repository = ImmutableFileDigestRepository::new(get_connection().await);

            let all_immutable_file_digests =
                repository.get_all_immutable_file_digest().await.unwrap();
            assert!(all_immutable_file_digests.is_empty());

            repository
                .upsert_immutable_file_digest(&"123.chunk".to_string(), "digest-123")
                .await
                .unwrap();
            repository
                .upsert_immutable_file_digest(&"456.chunk".to_string(), "digest-456")
                .await
                .unwrap();
            let all_immutable_file_digests =
                repository.get_all_immutable_file_digest().await.unwrap();

            assert_equivalent(
                vec![
                    ImmutableFileDigestRecord {
                        immutable_file_name: "123.chunk".to_string(),
                        digest: "digest-123".to_string(),
                    },
                    ImmutableFileDigestRecord {
                        immutable_file_name: "456.chunk".to_string(),
                        digest: "digest-456".to_string(),
                    },
                ],
                all_immutable_file_digests,
            );
        }

        // A second upsert for the same file name replaces the digest in place.
        #[tokio::test]
        async fn repository_upsert_immutable_file_digest() {
            let repository = ImmutableFileDigestRepository::new(get_connection().await);
            let immutable_file_name: ImmutableFileName = "123.chunk".to_string();
            let digest = "digest-123";
            let digest_updated = "digest-456";

            repository
                .upsert_immutable_file_digest(&immutable_file_name, digest)
                .await
                .unwrap();
            let immutable_file_digest = repository
                .get_immutable_file_digest(&immutable_file_name)
                .await
                .unwrap()
                .unwrap();
            assert_eq!(immutable_file_digest.digest, digest);

            repository
                .upsert_immutable_file_digest(&immutable_file_name, digest_updated)
                .await
                .unwrap();
            let immutable_file_digest = repository
                .get_immutable_file_digest(&immutable_file_name)
                .await
                .unwrap()
                .unwrap();
            assert_eq!(immutable_file_digest.digest, digest_updated);
        }

        // delete_all removes every record previously stored.
        #[tokio::test]
        async fn repository_delete_all_immutable_file_digests() {
            let repository = ImmutableFileDigestRepository::new(get_connection().await);

            repository
                .upsert_immutable_file_digest(&"123.chunk".to_string(), "digest-123")
                .await
                .unwrap();
            repository
                .upsert_immutable_file_digest(&"456.chunk".to_string(), "digest-456")
                .await
                .unwrap();
            let all_immutable_file_digests =
                repository.get_all_immutable_file_digest().await.unwrap();
            assert_eq!(2, all_immutable_file_digests.len());

            repository.delete_all().await.unwrap();

            let all_immutable_file_digests =
                repository.get_all_immutable_file_digest().await.unwrap();
            assert!(all_immutable_file_digests.is_empty());
        }
    }
266
    mod cache_provider {
        use std::path::PathBuf;

        use super::*;

        // Stored digests are returned for the matching immutable files.
        #[tokio::test]
        async fn can_store_values() {
            let provider = ImmutableFileDigestRepository::new(get_connection().await);
            let values_to_store = vec![
                ("0.chunk".to_string(), "digest 0".to_string()),
                ("1.chunk".to_string(), "digest 1".to_string()),
            ];
            let expected: BTreeMap<_, _> = BTreeMap::from([
                (
                    ImmutableFile::dummy(PathBuf::default(), 0, "0.chunk"),
                    Some("digest 0".to_string()),
                ),
                (
                    ImmutableFile::dummy(PathBuf::default(), 1, "1.chunk"),
                    Some("digest 1".to_string()),
                ),
            ]);
            let immutables = expected.keys().cloned().collect();

            provider
                .store(values_to_store)
                .await
                .expect("Cache write should not fail");
            let result = provider
                .get(immutables)
                .await
                .expect("Cache read should not fail");

            assert_eq!(expected, result);
        }

        // `get` returns entries only for the requested immutables, even when
        // more digests are cached.
        #[tokio::test]
        async fn returns_only_asked_immutables_cache() {
            let provider = ImmutableFileDigestRepository::new(get_connection().await);
            provider
                .store(vec![
                    ("0.chunk".to_string(), "digest 0".to_string()),
                    ("1.chunk".to_string(), "digest 1".to_string()),
                ])
                .await
                .expect("Cache write should not fail");
            let expected: BTreeMap<_, _> = BTreeMap::from([(
                ImmutableFile::dummy(PathBuf::default(), 0, "0.chunk"),
                Some("digest 0".to_string()),
            )]);
            let immutables = expected.keys().cloned().collect();

            let result = provider
                .get(immutables)
                .await
                .expect("Cache read should not fail");

            assert_eq!(expected, result);
        }

        // Asking for an immutable that was never cached yields `None`, not an error.
        #[tokio::test]
        async fn returns_none_for_uncached_asked_immutables() {
            let provider = ImmutableFileDigestRepository::new(get_connection().await);
            let expected: BTreeMap<_, _> =
                BTreeMap::from([(ImmutableFile::dummy(PathBuf::default(), 2, "2.chunk"), None)]);
            let immutables = expected.keys().cloned().collect();

            let result = provider
                .get(immutables)
                .await
                .expect("Cache read should not fail");

            assert_eq!(expected, result);
        }

        // A second `store` overwrites colliding entries and leaves the rest intact.
        #[tokio::test]
        async fn store_erase_existing_values() {
            let provider = ImmutableFileDigestRepository::new(get_connection().await);
            provider
                .store(vec![
                    ("0.chunk".to_string(), "to erase".to_string()),
                    ("1.chunk".to_string(), "keep me".to_string()),
                    ("2.chunk".to_string(), "keep me too".to_string()),
                ])
                .await
                .expect("Cache write should not fail");
            let values_to_store = vec![
                ("0.chunk".to_string(), "updated".to_string()),
                ("1.chunk".to_string(), "keep me".to_string()),
            ];
            let expected: BTreeMap<_, _> = BTreeMap::from([
                (
                    ImmutableFile::dummy(PathBuf::default(), 0, "0.chunk"),
                    Some("updated".to_string()),
                ),
                (
                    ImmutableFile::dummy(PathBuf::default(), 1, "1.chunk"),
                    Some("keep me".to_string()),
                ),
                (
                    ImmutableFile::dummy(PathBuf::default(), 2, "2.chunk"),
                    Some("keep me too".to_string()),
                ),
                (ImmutableFile::dummy(PathBuf::default(), 3, "3.chunk"), None),
            ]);
            let immutables = expected.keys().cloned().collect();

            provider
                .store(values_to_store)
                .await
                .expect("Cache write should not fail");
            let result = provider
                .get(immutables)
                .await
                .expect("Cache read should not fail");

            assert_eq!(expected, result);
        }

        // After `reset`, every previously stored digest reads back as `None`.
        #[tokio::test]
        async fn reset_clear_existing_values() {
            let provider = ImmutableFileDigestRepository::new(get_connection().await);
            let values_to_store = vec![
                ("0.chunk".to_string(), "digest 0".to_string()),
                ("1.chunk".to_string(), "digest 1".to_string()),
            ];
            let expected: BTreeMap<_, _> = BTreeMap::from([
                (
                    ImmutableFile::dummy(PathBuf::default(), 0, "0.chunk"),
                    Some("digest 0".to_string()),
                ),
                (
                    ImmutableFile::dummy(PathBuf::default(), 1, "1.chunk"),
                    Some("digest 1".to_string()),
                ),
            ]);
            let immutables = expected.keys().cloned().collect();

            provider
                .store(values_to_store)
                .await
                .expect("Cache write should not fail");
            provider.reset().await.expect("reset should not fails");

            let result: BTreeMap<_, _> = provider
                .get(immutables)
                .await
                .expect("Cache read should not fail");

            assert!(result.into_iter().all(|(_, cache)| cache.is_none()));
        }
    }
419
420    #[cfg(test)]
421    mod digest_mapper {
422
423        use mithril_common::digesters::cache::ImmutableFileDigestCacheProvider;
424
425        use super::*;
426
427        #[tokio::test]
428        async fn get_immutable_file_digest_map() {
429            let provider = ImmutableFileDigestRepository::new(get_connection().await);
430            let immutable_file_digest_records = vec![
431                ("0.chunk".to_string(), "digest 0".to_string()),
432                ("1.chunk".to_string(), "digest 1".to_string()),
433                ("2.chunk".to_string(), "digest 2".to_string()),
434            ];
435            let expected_immutable_file_digest_map =
436                BTreeMap::from_iter(immutable_file_digest_records.clone().into_iter());
437            provider
438                .store(immutable_file_digest_records)
439                .await
440                .expect("Cache write should not fail");
441
442            let immutable_file_digest_map = provider.get_immutable_file_digest_map().await.unwrap();
443
444            assert_eq!(
445                expected_immutable_file_digest_map,
446                immutable_file_digest_map
447            );
448        }
449    }
450}