mithril_client/file_downloader/http.rs

use std::{
    io::{BufReader, Read, Write},
    path::Path,
};

use anyhow::{anyhow, Context};
use async_trait::async_trait;
use flate2::read::GzDecoder;
use flume::{Receiver, Sender};
use futures::StreamExt;
use reqwest::{Response, StatusCode, Url};
use slog::{debug, Logger};
use tar::Archive;
use tokio::fs::File;
use tokio::io::AsyncReadExt;

use mithril_common::{logging::LoggerExtensions, StdResult};

use crate::common::CompressionAlgorithm;
use crate::feedback::FeedbackSender;
use crate::utils::StreamReader;

use super::{interface::DownloadEvent, FileDownloader, FileDownloaderUri};

/// A file downloader that handles downloads over HTTP.
///
/// Locations using the `file://` scheme are streamed directly from the local filesystem
/// instead of going through HTTP.
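///
/// # Example
///
/// A minimal construction sketch (marked `ignore`: the feedback receiver wiring shown
/// here is an illustrative assumption, not something provided by this module):
///
/// ```ignore
/// use std::sync::Arc;
///
/// // Assumed: `my_receiver` is some value implementing the client's feedback receiver trait.
/// let feedback_sender = FeedbackSender::new(&[Arc::new(my_receiver)]);
/// // A no-op logger, used here purely for illustration.
/// let logger = slog::Logger::root(slog::Discard, slog::o!());
/// let downloader = HttpFileDownloader::new(feedback_sender, logger)?;
/// ```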
pub struct HttpFileDownloader {
    http_client: reqwest::Client,
    feedback_sender: FeedbackSender,
    logger: Logger,
}

impl HttpFileDownloader {
    /// Constructs a new `HttpFileDownloader`.
    pub fn new(feedback_sender: FeedbackSender, logger: Logger) -> StdResult<Self> {
        let http_client = reqwest::ClientBuilder::new()
            .build()
            .with_context(|| "Building http client for HttpFileDownloader failed")?;

        Ok(Self {
            http_client,
            feedback_sender,
            logger: logger.new_with_component_name::<Self>(),
        })
    }

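    /// Perform a GET request on `location`, returning the response on a `200 OK` status
    /// and an error on `404 Not Found` or any other status code.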
    async fn get(&self, location: &str) -> StdResult<Response> {
        debug!(self.logger, "GET Snapshot location='{location}'.");
        let request_builder = self.http_client.get(location);
        let response = request_builder.send().await.with_context(|| {
            format!("Cannot perform a GET for the snapshot (location='{location}')")
        })?;

        match response.status() {
            StatusCode::OK => Ok(response),
            StatusCode::NOT_FOUND => Err(anyhow!("Location='{location}' not found")),
            status_code => Err(anyhow!("Unhandled error {status_code}")),
        }
    }

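    /// Convert a `file://` URL into its local filesystem path (e.g. `file:///tmp/snapshot.txt`
    /// becomes `/tmp/snapshot.txt` on Unix), returning `None` for any other scheme or an
    /// unparsable URL.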
    fn file_scheme_to_local_path(file_url: &str) -> Option<String> {
        Url::parse(file_url)
            .ok()
            .filter(|url| url.scheme() == "file")
            .and_then(|url| url.to_file_path().ok())
            .map(|path| path.to_string_lossy().into_owned())
    }

    /// Stream the `location` directly from the local filesystem
    async fn download_local_file(
        &self,
        local_path: &str,
        sender: &Sender<Vec<u8>>,
        download_event_type: DownloadEvent,
        file_size: u64,
    ) -> StdResult<()> {
        let mut downloaded_bytes: u64 = 0;
        let mut file = File::open(local_path).await?;
        let size = match file.metadata().await {
            Ok(metadata) => metadata.len(),
            Err(_) => file_size,
        };

        self.feedback_sender
            .send_event(download_event_type.build_download_started_event(size))
            .await;

        loop {
            // We can either allocate a new buffer on each iteration or clone a shared buffer into the sender.
            // A larger read buffer is faster: fewer context switches.
            let mut buffer = vec![0; 16 * 1024 * 1024];
            let bytes_read = file.read(&mut buffer).await?;
            if bytes_read == 0 {
                break;
            }
            buffer.truncate(bytes_read);
            sender.send_async(buffer).await.with_context(|| {
                format!("Local file read: could not write {bytes_read} bytes to stream.")
            })?;
            downloaded_bytes += bytes_read as u64;
            let event = download_event_type.build_download_progress_event(downloaded_bytes, size);
            self.feedback_sender.send_event(event).await;
        }

        self.feedback_sender
            .send_event(download_event_type.build_download_completed_event())
            .await;

        Ok(())
    }

    /// Stream the `location` remotely
    async fn download_remote_file(
        &self,
        location: &str,
        sender: &Sender<Vec<u8>>,
        download_event_type: DownloadEvent,
        file_size: u64,
    ) -> StdResult<()> {
        let mut downloaded_bytes: u64 = 0;
        let response = self.get(location).await?;
        let size = response.content_length().unwrap_or(file_size);
        let mut remote_stream = response.bytes_stream();

        self.feedback_sender
            .send_event(download_event_type.build_download_started_event(size))
            .await;

        while let Some(item) = remote_stream.next().await {
            let chunk = item.with_context(|| "Download: Could not read from byte stream")?;
            sender.send_async(chunk.to_vec()).await.with_context(|| {
                format!("Download: could not write {} bytes to stream.", chunk.len())
            })?;
            downloaded_bytes += chunk.len() as u64;
            let event = download_event_type.build_download_progress_event(downloaded_bytes, size);
            self.feedback_sender.send_event(event).await;
        }

        self.feedback_sender
            .send_event(download_event_type.build_download_completed_event())
            .await;

        Ok(())
    }

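    /// Consume the streamed bytes and unpack them into `unpack_dir`: when a compression
    /// algorithm is given, decompress and extract the tar archive; otherwise write the raw
    /// bytes to a file named after `download_id`.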
    fn unpack_file(
        stream: Receiver<Vec<u8>>,
        compression_algorithm: Option<CompressionAlgorithm>,
        unpack_dir: &Path,
        download_id: String,
    ) -> StdResult<()> {
        let input = StreamReader::new(stream);
        match compression_algorithm {
            Some(CompressionAlgorithm::Gzip) => {
                let gzip_decoder = GzDecoder::new(input);
                let mut file_archive = Archive::new(gzip_decoder);
                file_archive.unpack(unpack_dir).with_context(|| {
                    format!(
                        "Could not unpack with 'Gzip' from streamed data to directory '{}'",
                        unpack_dir.display()
                    )
                })?;
            }
            Some(CompressionAlgorithm::Zstandard) => {
                let zstandard_decoder = zstd::Decoder::new(input)
                    .with_context(|| "Unpack failed: Create Zstandard decoder error")?;
                let mut file_archive = Archive::new(zstandard_decoder);
                file_archive.unpack(unpack_dir).with_context(|| {
                    format!(
                        "Could not unpack with 'Zstd' from streamed data to directory '{}'",
                        unpack_dir.display()
                    )
                })?;
            }
            None => {
                let file_path = unpack_dir.join(download_id);
                if file_path.exists() {
                    std::fs::remove_file(file_path.clone())?;
                }
                let mut file = std::fs::File::create(file_path)?;
                let input_buffered = BufReader::new(input);
                for byte in input_buffered.bytes() {
                    file.write_all(&[byte?])?;
                }
                file.flush()?;
            }
        };

        Ok(())
    }
}

#[async_trait]
impl FileDownloader for HttpFileDownloader {
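    // Download the file at `location` (over HTTP, or directly from disk for `file://` URIs)
    // and unpack it into `target_dir`, emitting feedback events along the way.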
    async fn download_unpack(
        &self,
        location: &FileDownloaderUri,
        file_size: u64,
        target_dir: &Path,
        compression_algorithm: Option<CompressionAlgorithm>,
        download_event_type: DownloadEvent,
    ) -> StdResult<()> {
        if !target_dir.is_dir() {
            Err(
                anyhow!("target path is not a directory or does not exist: `{target_dir:?}`")
                    .context("Download-Unpack: prerequisite error"),
            )?;
        }

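        // Downloaded chunks are pushed through a bounded channel to a blocking task that
        // performs the synchronous decompression and unpacking; the bound provides
        // backpressure so the download cannot run far ahead of the unpacker.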
        let (sender, receiver) = flume::bounded(32);
        let dest_dir = target_dir.to_path_buf();
        let download_id = download_event_type.download_id().to_owned();
        let unpack_thread = tokio::task::spawn_blocking(move || -> StdResult<()> {
            Self::unpack_file(receiver, compression_algorithm, &dest_dir, download_id)
        });
        if let Some(local_path) = Self::file_scheme_to_local_path(location.as_str()) {
            self.download_local_file(&local_path, &sender, download_event_type, file_size)
                .await?;
        } else {
            self.download_remote_file(location.as_str(), &sender, download_event_type, file_size)
                .await?;
        }
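        // Dropping the sender closes the channel, letting the unpack task observe
        // end-of-stream and finish.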
        drop(sender);
        unpack_thread
            .await
            .with_context(|| {
                format!(
                    "Unpack: panic while unpacking to dir '{}'",
                    target_dir.display()
                )
            })?
            .with_context(|| {
                format!("Unpack: could not unpack to dir '{}'", target_dir.display())
            })?;

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use httpmock::MockServer;

    use mithril_common::{entities::FileUri, test_utils::TempDir};

    use crate::{
        feedback::{MithrilEvent, MithrilEventCardanoDatabase, StackFeedbackReceiver},
        test_utils::TestLogger,
    };

    use super::*;

    #[cfg(not(target_family = "windows"))]
    fn local_file_uri(path: &Path) -> FileDownloaderUri {
        FileDownloaderUri::FileUri(FileUri(format!(
            "file://{}",
            path.canonicalize().unwrap().to_string_lossy()
        )))
    }

    #[cfg(target_family = "windows")]
    fn local_file_uri(path: &Path) -> FileDownloaderUri {
        // We need to transform `\\?\C:\data\Temp\mithril_test\snapshot.txt` into `file:///C:/data/Temp/mithril_test/snapshot.txt`
        FileDownloaderUri::FileUri(FileUri(format!(
            "file:/{}",
            path.canonicalize()
                .unwrap()
                .to_string_lossy()
                .replace("\\", "/")
                .replace("?/", ""),
        )))
    }

    #[tokio::test]
    async fn test_download_http_file_send_feedback() {
        let target_dir = TempDir::create(
            "client-http-downloader",
            "test_download_http_file_send_feedback",
        );
        let content = "Hello, world!";
        let size = content.len() as u64;
        let server = MockServer::start();
        server.mock(|when, then| {
            when.method(httpmock::Method::GET).path("/snapshot.tar");
            then.status(200)
                .body(content)
                .header(reqwest::header::CONTENT_LENGTH.as_str(), size.to_string());
        });
        let feedback_receiver = Arc::new(StackFeedbackReceiver::new());
        let http_file_downloader = HttpFileDownloader::new(
            FeedbackSender::new(&[feedback_receiver.clone()]),
            TestLogger::stdout(),
        )
        .unwrap();
        let download_id = "id".to_string();

        http_file_downloader
            .download_unpack(
                &FileDownloaderUri::FileUri(FileUri(server.url("/snapshot.tar"))),
                0,
                &target_dir,
                None,
                DownloadEvent::Digest {
                    download_id: download_id.clone(),
                },
            )
            .await
            .unwrap();

        let expected_events = vec![
            MithrilEvent::CardanoDatabase(MithrilEventCardanoDatabase::DigestDownloadStarted {
                download_id: download_id.clone(),
                size,
            }),
            MithrilEvent::CardanoDatabase(MithrilEventCardanoDatabase::DigestDownloadProgress {
                download_id: download_id.clone(),
                downloaded_bytes: size,
                size,
            }),
            MithrilEvent::CardanoDatabase(MithrilEventCardanoDatabase::DigestDownloadCompleted {
                download_id: download_id.clone(),
            }),
        ];
        assert_eq!(expected_events, feedback_receiver.stacked_events());
    }

    #[tokio::test]
    async fn test_download_local_file_send_feedback() {
        let target_dir = TempDir::create(
            "client-http-downloader",
            "test_download_local_file_send_feedback",
        );
        let content = "Hello, world!";
        let size = content.len() as u64;

        let source_file_path = target_dir.join("snapshot.txt");
        let mut file = std::fs::File::create(&source_file_path).unwrap();
        file.write_all(content.as_bytes()).unwrap();

        let feedback_receiver = Arc::new(StackFeedbackReceiver::new());
        let http_file_downloader = HttpFileDownloader::new(
            FeedbackSender::new(&[feedback_receiver.clone()]),
            TestLogger::stdout(),
        )
        .unwrap();
        let download_id = "id".to_string();

        http_file_downloader
            .download_unpack(
                &local_file_uri(&source_file_path),
                0,
                &target_dir,
                None,
                DownloadEvent::Digest {
                    download_id: download_id.clone(),
                },
            )
            .await
            .unwrap();

        let expected_events = vec![
            MithrilEvent::CardanoDatabase(MithrilEventCardanoDatabase::DigestDownloadStarted {
                download_id: download_id.clone(),
                size,
            }),
            MithrilEvent::CardanoDatabase(MithrilEventCardanoDatabase::DigestDownloadProgress {
                download_id: download_id.clone(),
                downloaded_bytes: size,
                size,
            }),
            MithrilEvent::CardanoDatabase(MithrilEventCardanoDatabase::DigestDownloadCompleted {
                download_id: download_id.clone(),
            }),
        ];
        assert_eq!(expected_events, feedback_receiver.stacked_events());
    }
}