Move database config to separate section
This renames:

    database_backend -> database.backend
    database_path -> database.path
    db_cache_capacity_mb -> database.cache_capacity_mb
    rocksdb_max_open_files -> database.rocksdb_max_open_files

Charles updated the NixOS module.

Co-authored-by: Charles Hall <charles@computer.surgery>
parent 79d5d306cc
commit d26b87a2f2
6 changed files with 39 additions and 28 deletions
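For admins, the practical effect is that the four renamed options now live under a [database] table in the config file instead of at the top level. Below is a minimal sketch of how serde maps such a nested section onto the DatabaseConfig struct added in this commit, assuming a TOML config file as in Conduit-derived servers; the sample TOML values, the default function body, and the trimmed-down Config struct are illustrative assumptions, not copied from the grapevine source.

// Minimal sketch, assuming the serde and toml crates. Field names and serde
// attributes mirror the DatabaseConfig struct introduced in this commit;
// everything else here is made up for illustration.
use serde::Deserialize;

fn default_db_cache_capacity_mb() -> f64 {
    // Placeholder default; the real default lives in grapevine's config code.
    300.0
}

#[derive(Debug, Deserialize)]
struct DatabaseConfig {
    backend: String,
    path: String,
    #[serde(default = "default_db_cache_capacity_mb")]
    cache_capacity_mb: f64,
}

#[derive(Debug, Deserialize)]
struct Config {
    server_name: String,
    database: DatabaseConfig,
}

fn main() {
    // Previously these were flat keys: database_backend, database_path,
    // db_cache_capacity_mb (and rocksdb_max_open_files behind a feature).
    let toml_text = r#"
        server_name = "example.com"

        [database]
        backend = "rocksdb"
        path = "/var/lib/grapevine"
    "#;

    let config: Config = toml::from_str(toml_text).expect("valid config");
    println!(
        "{} database at {}",
        config.database.backend, config.database.path
    );
}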
@@ -36,7 +36,7 @@ in
       '';
       default = false;
     };
-    database_path = lib.mkOption {
+    database.path = lib.mkOption {
       type = types.nonEmptyStr;
       readOnly = true;
       description = ''
@@ -31,15 +31,10 @@ pub(crate) struct Config {
     pub(crate) tls: Option<TlsConfig>,

     pub(crate) server_name: OwnedServerName,
-    pub(crate) database_backend: String,
-    pub(crate) database_path: String,
-    #[serde(default = "default_db_cache_capacity_mb")]
-    pub(crate) db_cache_capacity_mb: f64,
+    pub(crate) database: DatabaseConfig,

     #[serde(default = "default_cache_capacity_modifier")]
     pub(crate) cache_capacity_modifier: f64,
-    #[cfg(feature = "rocksdb")]
-    #[serde(default = "default_rocksdb_max_open_files")]
-    pub(crate) rocksdb_max_open_files: i32,
     #[serde(default = "default_pdu_cache_capacity")]
     pub(crate) pdu_cache_capacity: u32,
     #[serde(default = "default_cleanup_second_interval")]
@@ -158,6 +153,17 @@ impl Default for TurnConfig {
     }
 }

+#[derive(Clone, Debug, Deserialize)]
+pub(crate) struct DatabaseConfig {
+    pub(crate) backend: String,
+    pub(crate) path: String,
+    #[serde(default = "default_db_cache_capacity_mb")]
+    pub(crate) cache_capacity_mb: f64,
+    #[cfg(feature = "rocksdb")]
+    #[serde(default = "default_rocksdb_max_open_files")]
+    pub(crate) rocksdb_max_open_files: i32,
+}
+
 fn false_fn() -> bool {
     false
 }
@@ -250,7 +250,7 @@ pub(crate) struct KeyValueDatabase {

 impl KeyValueDatabase {
     fn check_db_setup(config: &Config) -> Result<()> {
-        let path = Path::new(&config.database_path);
+        let path = Path::new(&config.database.path);

         let sqlite_exists = path
             .join(format!(
@@ -279,14 +279,14 @@ impl KeyValueDatabase {
             return Ok(());
         }

-        if sqlite_exists && config.database_backend != "sqlite" {
+        if sqlite_exists && config.database.backend != "sqlite" {
             return Err(Error::bad_config(
                 "Found sqlite at database_path, but is not specified in \
                  config.",
             ));
         }

-        if rocksdb_exists && config.database_backend != "rocksdb" {
+        if rocksdb_exists && config.database.backend != "rocksdb" {
             return Err(Error::bad_config(
                 "Found rocksdb at database_path, but is not specified in \
                  config.",
@@ -305,8 +305,8 @@ impl KeyValueDatabase {
     pub(crate) async fn load_or_create(config: Config) -> Result<()> {
         Self::check_db_setup(&config)?;

-        if !Path::new(&config.database_path).exists() {
-            fs::create_dir_all(&config.database_path).map_err(|_| {
+        if !Path::new(&config.database.path).exists() {
+            fs::create_dir_all(&config.database.path).map_err(|_| {
                 Error::BadConfig(
                     "Database folder doesn't exists and couldn't be created \
                      (e.g. due to missing permissions). Please create the \
@@ -320,7 +320,8 @@ impl KeyValueDatabase {
             allow(unused_variables)
         )]
         let builder: Arc<dyn KeyValueDatabaseEngine> = match &*config
-            .database_backend
+            .database
+            .backend
         {
             #[cfg(feature = "sqlite")]
             "sqlite" => {
@@ -1106,7 +1107,7 @@ impl KeyValueDatabase {

             info!(
                 "Loaded {} database with version {}",
-                services().globals.config.database_backend,
+                services().globals.config.database.backend,
                 latest_database_version
             );
         } else {
@@ -1119,7 +1120,7 @@ impl KeyValueDatabase {

             warn!(
                 "Created new {} database with version {}",
-                services().globals.config.database_backend,
+                services().globals.config.database.backend,
                 latest_database_version
             );
         }
@@ -78,32 +78,36 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
             clippy::cast_possible_truncation
         )]
         let cache_capacity_bytes =
-            (config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize;
+            (config.database.cache_capacity_mb * 1024.0 * 1024.0) as usize;
         let rocksdb_cache = Cache::new_lru_cache(cache_capacity_bytes);

-        let db_opts = db_options(config.rocksdb_max_open_files, &rocksdb_cache);
+        let db_opts =
+            db_options(config.database.rocksdb_max_open_files, &rocksdb_cache);

         let cfs = DBWithThreadMode::<MultiThreaded>::list_cf(
             &db_opts,
-            &config.database_path,
+            &config.database.path,
         )
         .map(|x| x.into_iter().collect::<HashSet<_>>())
         .unwrap_or_default();

         let db = DBWithThreadMode::<MultiThreaded>::open_cf_descriptors(
             &db_opts,
-            &config.database_path,
+            &config.database.path,
             cfs.iter().map(|name| {
                 ColumnFamilyDescriptor::new(
                     name,
-                    db_options(config.rocksdb_max_open_files, &rocksdb_cache),
+                    db_options(
+                        config.database.rocksdb_max_open_files,
+                        &rocksdb_cache,
+                    ),
                 )
             }),
         )?;

         Ok(Arc::new(Engine {
             rocks: db,
-            max_open_files: config.rocksdb_max_open_files,
+            max_open_files: config.database.rocksdb_max_open_files,
             cache: rocksdb_cache,
             old_cfs: cfs,
             new_cfs: Mutex::default(),
@@ -110,7 +110,7 @@ impl Engine {

 impl KeyValueDatabaseEngine for Arc<Engine> {
     fn open(config: &Config) -> Result<Self> {
-        let path = Path::new(&config.database_path).join(format!(
+        let path = Path::new(&config.database.path).join(format!(
             "{}.db",
             if config.conduit_compat {
                 "conduit"
@@ -130,9 +130,9 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
             clippy::cast_precision_loss,
             clippy::cast_sign_loss
         )]
-        let cache_size_per_thread = ((config.db_cache_capacity_mb * 1024.0)
-            / ((num_cpus::get() as f64 * 2.0) + 1.0))
-            as u32;
+        let cache_size_per_thread =
+            ((config.database.cache_capacity_mb * 1024.0)
+                / ((num_cpus::get() as f64 * 2.0) + 1.0)) as u32;

         let writer =
             Mutex::new(Engine::prepare_conn(&path, cache_size_per_thread)?);
@@ -494,14 +494,14 @@ impl Service {

     pub(crate) fn get_media_folder(&self) -> PathBuf {
         let mut r = PathBuf::new();
-        r.push(self.config.database_path.clone());
+        r.push(self.config.database.path.clone());
         r.push("media");
         r
     }

     pub(crate) fn get_media_file(&self, key: &[u8]) -> PathBuf {
         let mut r = PathBuf::new();
-        r.push(self.config.database_path.clone());
+        r.push(self.config.database.path.clone());
         r.push("media");
         r.push(general_purpose::URL_SAFE_NO_PAD.encode(key));
         r