feat: [torrust#56] transfer categories from db v1.0.0 to v2.0.0
First action for the command to upgrade data. It transfers the
categories from the current DB schema (v1.0.0) to the new DB schema.
josecelano committed Nov 30, 2022
1 parent 7513df0 commit b92fb08
Showing 6 changed files with 200 additions and 56 deletions.
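
Note: the transfer keeps the original category ids by resetting the destination AUTOINCREMENT sequence and copying the categories ordered by id. A minimal sketch of that flow, using only the types and methods added in this commit (the connection URLs and the helper function name are placeholder assumptions, not part of the commit):

// Sketch of the category transfer introduced in this commit (not the db_migrate binary itself).
use std::sync::Arc;
use torrust_index_backend::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use torrust_index_backend::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;

async fn upgrade_categories(source_url: &str, dest_url: &str) {
    let source = Arc::new(SqliteDatabaseV1_0_0::new(source_url).await);
    let dest = Arc::new(SqliteDatabaseV2_0_0::new(dest_url).await);

    // Start from an empty destination and reset the AUTOINCREMENT counter,
    // so the copied categories get the same ids as in the source DB.
    dest.delete_all_database_rows()
        .await
        .expect("can't reset destination DB");
    dest.reset_categories_sequence()
        .await
        .expect("can't reset categories sequence");

    // Copy categories in id order and check that every id is preserved.
    for category in source
        .get_categories_order_by_id()
        .await
        .expect("can't read source categories")
    {
        let new_id = dest
            .insert_category_and_get_id(&category.name)
            .await
            .expect("can't insert category");
        assert_eq!(new_id, category.category_id, "category id changed during copy");
    }
}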
91 changes: 59 additions & 32 deletions src/bin/db_migrate.rs
@@ -3,14 +3,11 @@

use std::sync::Arc;
use torrust_index_backend::config::Configuration;
use torrust_index_backend::databases::database::{
connect_database, connect_database_without_running_migrations,
};

#[actix_web::main]
async fn main() {
let dest_database_connect_url = "sqlite://data_v2.db?mode=rwc";
use torrust_index_backend::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
use torrust_index_backend::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;

async fn current_db() -> Arc<SqliteDatabaseV1_0_0> {
// Connect to the old v1.0.0 DB
let cfg = match Configuration::load_from_file().await {
Ok(config) => Arc::new(config),
Err(error) => {
@@ -20,36 +17,66 @@ async fn main() {

let settings = cfg.settings.read().await;

// Connect to the current v1.0.0 DB
let source_database = Arc::new(
connect_database_without_running_migrations(&settings.database.connect_url)
.await
.expect("Can't connect to source DB."),
);
Arc::new(SqliteDatabaseV1_0_0::new(&settings.database.connect_url).await)
}

async fn new_db(db_filename: String) -> Arc<SqliteDatabaseV2_0_0> {
let dest_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename);
Arc::new(SqliteDatabaseV2_0_0::new(&dest_database_connect_url).await)
}

async fn reset_destiny_database(dest_database: Arc<SqliteDatabaseV2_0_0>) {
println!("Truncating all tables in destiny database ...");
dest_database
.delete_all_database_rows()
.await
.expect("Can't reset destiny database.");
}

// Connect to the new v2.0.0 DB (running migrations)
let dest_database = Arc::new(
connect_database(&dest_database_connect_url)
async fn transfer_categories(
source_database: Arc<SqliteDatabaseV1_0_0>,
dest_database: Arc<SqliteDatabaseV2_0_0>,
) {
let source_categories = source_database.get_categories_order_by_id().await.unwrap();
println!("[v1] categories: {:?}", &source_categories);

let result = dest_database.reset_categories_sequence().await.unwrap();
println!("result {:?}", result);

for cat in &source_categories {
println!(
"[v2] adding category: {:?} {:?} ...",
&cat.category_id, &cat.name
);
let id = dest_database
.insert_category_and_get_id(&cat.name)
.await
.expect("Can't connect to dest DB."),
);
.unwrap();

if id != cat.category_id {
panic!(
"Error copying category {:?} from source DB to destiny DB",
&cat.category_id
);
}

println!("Upgrading database from v1.0.0 to v2.0.0 ...");
println!("[v2] category: {:?} {:?} added.", id, &cat.name);
}

// It's just a test for the source connection.
// Print categories in current DB
let categories = source_database.get_categories().await;
println!("[v1] categories: {:?}", &categories);
let dest_categories = dest_database.get_categories().await.unwrap();
println!("[v2] categories: {:?}", &dest_categories);
}

#[actix_web::main]
async fn main() {
// Get connections to the source and destiny databases
let source_database = current_db().await;
let dest_database = new_db("data_v2.db".to_string()).await;

// It's just a test for the dest connection.
// Print categories in new DB
let categories = dest_database.get_categories().await;
println!("[v2] categories: {:?}", &categories);
println!("Upgrading data from version v1.0.0 to v2.0.0 ...");

// Transfer categories
reset_destiny_database(dest_database.clone()).await;
transfer_categories(source_database.clone(), dest_database.clone()).await;

/* TODO:
- Transfer categories: remove categories from seeding, reset sequence for IDs, copy categories in the right order to keep the same ids.
- ...
*/
// TODO: WIP. We have to transfer data from the 5 tables in V1 and the torrent files in folder `uploads`.
}
16 changes: 0 additions & 16 deletions src/databases/database.rs
@@ -77,22 +77,6 @@ pub async fn connect_database(db_path: &str) -> Result<Box<dyn Database>, DatabaseError> {
}
}

/// Connect to a database without running migrations
pub async fn connect_database_without_running_migrations(db_path: &str) -> Result<Box<dyn Database>, DatabaseError> {
match &db_path.chars().collect::<Vec<char>>() as &[char] {
['s', 'q', 'l', 'i', 't', 'e', ..] => {
let db = SqliteDatabase::new_without_running_migrations(db_path).await;
Ok(Box::new(db))
}
['m', 'y', 's', 'q', 'l', ..] => {
todo!()
}
_ => {
Err(DatabaseError::UnrecognizedDatabaseDriver)
}
}
}

/// Trait for database implementations.
#[async_trait]
pub trait Database: Sync + Send {
2 changes: 2 additions & 0 deletions src/databases/mod.rs
@@ -1,3 +1,5 @@
pub mod database;
pub mod mysql;
pub mod sqlite;
pub mod sqlite_v1_0_0;
pub mod sqlite_v2_0_0;
8 changes: 0 additions & 8 deletions src/databases/sqlite.rs
@@ -30,14 +30,6 @@ impl SqliteDatabase {

Self { pool: db }
}

pub async fn new_without_running_migrations(database_url: &str) -> Self {
let db = SqlitePoolOptions::new()
.connect(database_url)
.await
.expect("Unable to create database pool.");
Self { pool: db }
}
}

#[async_trait]
30 changes: 30 additions & 0 deletions src/databases/sqlite_v1_0_0.rs
@@ -0,0 +1,30 @@
use super::database::DatabaseError;
use serde::{Deserialize, Serialize};
use sqlx::sqlite::SqlitePoolOptions;
use sqlx::{query_as, SqlitePool};

#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)]
pub struct Category {
pub category_id: i64,
pub name: String,
}
pub struct SqliteDatabaseV1_0_0 {
pub pool: SqlitePool,
}

impl SqliteDatabaseV1_0_0 {
pub async fn new(database_url: &str) -> Self {
let db = SqlitePoolOptions::new()
.connect(database_url)
.await
.expect("Unable to create database pool.");
Self { pool: db }
}

pub async fn get_categories_order_by_id(&self) -> Result<Vec<Category>, DatabaseError> {
query_as::<_, Category>("SELECT category_id, name FROM torrust_categories ORDER BY category_id ASC")
.fetch_all(&self.pool)
.await
.map_err(|_| DatabaseError::Error)
}
}
109 changes: 109 additions & 0 deletions src/databases/sqlite_v2_0_0.rs
@@ -0,0 +1,109 @@
use super::database::DatabaseError;
use serde::{Deserialize, Serialize};
use sqlx::sqlite::{SqlitePoolOptions, SqliteQueryResult};
use sqlx::{query, query_as, SqlitePool};

#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)]
pub struct Category {
pub category_id: i64,
pub name: String,
}
pub struct SqliteDatabaseV2_0_0 {
pub pool: SqlitePool,
}

impl SqliteDatabaseV2_0_0 {
pub async fn new(database_url: &str) -> Self {
let db = SqlitePoolOptions::new()
.connect(database_url)
.await
.expect("Unable to create database pool.");
Self { pool: db }
}

pub async fn reset_categories_sequence(&self) -> Result<SqliteQueryResult, DatabaseError> {
query("DELETE FROM `sqlite_sequence` WHERE `name` = 'torrust_categories'")
.execute(&self.pool)
.await
.map_err(|_| DatabaseError::Error)
}

pub async fn get_categories(&self) -> Result<Vec<Category>, DatabaseError> {
query_as::<_, Category>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name")
.fetch_all(&self.pool)
.await
.map_err(|_| DatabaseError::Error)
}

pub async fn insert_category_and_get_id(&self, category_name: &str) -> Result<i64, DatabaseError> {
query("INSERT INTO torrust_categories (name) VALUES (?)")
.bind(category_name)
.execute(&self.pool)
.await
.map(|v| v.last_insert_rowid())
.map_err(|e| match e {
sqlx::Error::Database(err) => {
if err.message().contains("UNIQUE") {
DatabaseError::CategoryAlreadyExists
} else {
DatabaseError::Error
}
}
_ => DatabaseError::Error,
})
}

pub async fn delete_all_database_rows(&self) -> Result<(), DatabaseError> {
query("DELETE FROM torrust_categories;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_torrents;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_tracker_keys;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_users;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_user_authentication;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_user_bans;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_user_invitations;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_user_profiles;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_torrents;")
.execute(&self.pool)
.await
.unwrap();

query("DELETE FROM torrust_user_public_keys;")
.execute(&self.pool)
.await
.unwrap();

Ok(())
}
}
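
For context on reset_categories_sequence: SQLite stores the AUTOINCREMENT counter of each table in the internal sqlite_sequence table, so deleting the torrust_categories row makes the next inserted category get id 1 again, which is what lets the copied categories keep their original ids. A minimal standalone sketch, assuming a Tokio runtime and an in-memory SQLite database (the table layout is a simplified assumption based on the queries above):

use sqlx::sqlite::SqlitePoolOptions;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = SqlitePoolOptions::new().connect("sqlite::memory:").await?;

    sqlx::query(
        "CREATE TABLE torrust_categories (
            category_id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL UNIQUE
        )",
    )
    .execute(&pool)
    .await?;

    // Insert and delete a row: the AUTOINCREMENT counter keeps advancing.
    sqlx::query("INSERT INTO torrust_categories (name) VALUES ('movies')")
        .execute(&pool)
        .await?;
    sqlx::query("DELETE FROM torrust_categories")
        .execute(&pool)
        .await?;

    // Without this reset, the next insert would get category_id = 2.
    sqlx::query("DELETE FROM sqlite_sequence WHERE name = 'torrust_categories'")
        .execute(&pool)
        .await?;

    let result = sqlx::query("INSERT INTO torrust_categories (name) VALUES ('music')")
        .execute(&pool)
        .await?;
    assert_eq!(result.last_insert_rowid(), 1); // ids start from 1 again

    Ok(())
}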
