mithril_aggregator/commands/
database_command.rs1use std::{collections::HashMap, path::PathBuf, sync::Arc};
2
3use anyhow::Context;
4use clap::{Parser, Subcommand};
5use config::{ConfigBuilder, Map, Value, builder::DefaultState};
6use mithril_persistence::sqlite::{SqliteCleaner, SqliteCleaningTask, SqliteConnection};
7use serde::{Deserialize, Serialize};
8use slog::{Logger, debug};
9
10use mithril_common::StdResult;
11use mithril_doc::{Documenter, StructDoc};
12
13use crate::{
14 ConfigurationSource, ExecutionEnvironment, dependency_injection::DependenciesBuilder,
15 extract_all,
16};
17
18#[derive(Debug, Clone, Serialize, Deserialize, Documenter)]
19pub struct DatabaseCommandConfiguration {
20 #[example = "`./mithril-aggregator/stores`"]
21 pub data_stores_directory: PathBuf,
22}
23
24impl ConfigurationSource for DatabaseCommandConfiguration {
25 fn environment(&self) -> ExecutionEnvironment {
26 ExecutionEnvironment::Production
27 }
28
29 fn data_stores_directory(&self) -> PathBuf {
30 self.data_stores_directory.clone()
31 }
32
33 fn cardano_transactions_database_connection_pool_size(&self) -> usize {
34 1
35 }
36}
37
38#[derive(Parser, Debug, Clone)]
40pub struct DatabaseCommand {
41 #[clap(subcommand)]
43 pub database_subcommand: DatabaseSubCommand,
44}
45
46impl DatabaseCommand {
47 pub async fn execute(
48 &self,
49 root_logger: Logger,
50 config_builder: ConfigBuilder<DefaultState>,
51 ) -> StdResult<()> {
52 self.database_subcommand.execute(root_logger, config_builder).await
53 }
54
55 pub fn extract_config(command_path: String) -> HashMap<String, StructDoc> {
56 extract_all!(
57 command_path,
58 DatabaseSubCommand,
59 Migrate = { MigrateCommand },
60 Vacuum = { VacuumCommand },
61 )
62 }
63}
64
65#[derive(Debug, Clone, Subcommand)]
66pub enum DatabaseSubCommand {
67 Migrate(MigrateCommand),
69
70 Vacuum(VacuumCommand),
72}
73
74impl DatabaseSubCommand {
75 pub async fn execute(
76 &self,
77 root_logger: Logger,
78 config_builder: ConfigBuilder<DefaultState>,
79 ) -> StdResult<()> {
80 match self {
81 Self::Migrate(cmd) => cmd.execute(root_logger, config_builder).await,
82 Self::Vacuum(cmd) => cmd.execute(root_logger, config_builder).await,
83 }
84 }
85}
86
87#[derive(Parser, Debug, Clone)]
88pub struct MigrateCommand {
89 #[clap(long, env = "STORES_DIRECTORY")]
91 stores_directory: PathBuf,
92}
93
94impl MigrateCommand {
95 pub async fn execute(
96 &self,
97 root_logger: Logger,
98 config_builder: ConfigBuilder<DefaultState>,
99 ) -> StdResult<()> {
100 let mut config: DatabaseCommandConfiguration = config_builder
101 .build()
102 .with_context(|| "configuration build error")?
103 .try_deserialize()
104 .with_context(|| "configuration deserialize error")?;
105 config.data_stores_directory = self.stores_directory.clone();
106 debug!(root_logger, "DATABASE MIGRATE command"; "config" => format!("{config:?}"));
107 println!(
108 "Migrating databases from stores directory: {}",
109 self.stores_directory.to_string_lossy()
110 );
111 let mut dependencies_builder =
112 DependenciesBuilder::new(root_logger.clone(), Arc::new(config));
113
114 dependencies_builder
115 .create_database_command_container()
116 .await
117 .with_context(|| {
118 "Failed to run databases migrations while creating the database command dependencies container"
119 })?;
120
121 Ok(())
122 }
123
124 pub fn extract_config(command_path: String) -> HashMap<String, StructDoc> {
125 HashMap::from([(command_path, DatabaseCommandConfiguration::extract())])
126 }
127}
128
129#[derive(Parser, Debug, Clone)]
130pub struct VacuumCommand {
131 #[clap(long, env = "STORES_DIRECTORY")]
133 stores_directory: PathBuf,
134}
135
136impl VacuumCommand {
137 async fn vacuum_database(
138 db_connection: Arc<SqliteConnection>,
139 logger: Logger,
140 ) -> StdResult<()> {
141 SqliteCleaner::new(&db_connection)
142 .with_logger(logger)
143 .with_tasks(&[SqliteCleaningTask::Vacuum])
144 .run()?;
145
146 Ok(())
147 }
148
149 pub async fn execute(
150 &self,
151 root_logger: Logger,
152 config_builder: ConfigBuilder<DefaultState>,
153 ) -> StdResult<()> {
154 let mut config: DatabaseCommandConfiguration = config_builder
155 .build()
156 .with_context(|| "configuration build error")?
157 .try_deserialize()
158 .with_context(|| "configuration deserialize error")?;
159 config.data_stores_directory = self.stores_directory.clone();
160 debug!(root_logger, "DATABASE VACUUM command"; "config" => format!("{config:?}"));
161 println!(
162 "Vacuuming database from stores directory: {}",
163 self.stores_directory.to_string_lossy()
164 );
165 let mut dependencies_builder =
166 DependenciesBuilder::new(root_logger.clone(), Arc::new(config.clone()));
167
168 let dependency_container =
169 dependencies_builder
170 .create_database_command_container()
171 .await
172 .with_context(|| "Failed to create the database command dependencies container")?;
173
174 Self::vacuum_database(dependency_container.main_db_connection, root_logger.clone())
175 .await
176 .with_context(|| "Failed to vacuum the main database")?;
177
178 Ok(())
179 }
180
181 pub fn extract_config(command_path: String) -> HashMap<String, StructDoc> {
182 HashMap::from([(command_path, DatabaseCommandConfiguration::extract())])
183 }
184}
185
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use mithril_common::temp_dir;

    use crate::test_tools::TestLogger;

    use super::*;

    #[tokio::test]
    async fn create_container_does_not_panic() {
        // A configuration pointing at a fresh temporary stores directory.
        let configuration = DatabaseCommandConfiguration {
            data_stores_directory: temp_dir!().join("stores"),
        };
        let mut builder = DependenciesBuilder::new(TestLogger::stdout(), Arc::new(configuration));

        builder
            .create_database_command_container()
            .await
            .expect("Expected container creation to succeed without panicking");
    }
}