refactor: [torrust#56] rename destiny DB to target DB
josecelano committed Nov 30, 2022
1 parent b400962 commit e8d984d
Showing 12 changed files with 105 additions and 93 deletions.
18 changes: 9 additions & 9 deletions src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs
@@ -12,19 +12,19 @@ pub async fn current_db(db_filename: &str) -> Arc<SqliteDatabaseV1_0_0> {
 }

 pub async fn new_db(db_filename: &str) -> Arc<SqliteDatabaseV2_0_0> {
-    let dest_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename);
-    Arc::new(SqliteDatabaseV2_0_0::new(&dest_database_connect_url).await)
+    let target_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename);
+    Arc::new(SqliteDatabaseV2_0_0::new(&target_database_connect_url).await)
 }

-pub async fn migrate_destiny_database(dest_database: Arc<SqliteDatabaseV2_0_0>) {
-    println!("Running migrations in destiny database...");
-    dest_database.migrate().await;
+pub async fn migrate_target_database(target_database: Arc<SqliteDatabaseV2_0_0>) {
+    println!("Running migrations in the target database...");
+    target_database.migrate().await;
 }

-pub async fn reset_destiny_database(dest_database: Arc<SqliteDatabaseV2_0_0>) {
-    println!("Truncating all tables in destiny database ...");
-    dest_database
+pub async fn reset_target_database(target_database: Arc<SqliteDatabaseV2_0_0>) {
+    println!("Truncating all tables in target database ...");
+    target_database
         .delete_all_database_rows()
         .await
-        .expect("Can't reset destiny database.");
+        .expect("Can't reset the target database.");
 }
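
Aside (not part of this commit): the `?mode=rwc` suffix in the connection URL above asks the SQLite driver to open the file read-write and to create it when it does not exist yet, which is why new_db can point at a target database file that has not been created. A minimal sketch of opening such a URL directly, assuming the sqlx crate that this codebase appears to use; the function name is made up for illustration:

use sqlx::sqlite::SqlitePool;

// Hypothetical helper: open (and create, if missing) an SQLite database
// using the same `sqlite://<file>?mode=rwc` URL shape as `new_db` above.
async fn open_target_db(db_filename: &str) -> Result<SqlitePool, sqlx::Error> {
    let url = format!("sqlite://{}?mode=rwc", db_filename);
    SqlitePool::connect(&url).await
}
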
src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/category_transferrer.rs
@@ -3,18 +3,18 @@ use std::sync::Arc;
 use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
 use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::{CategoryRecordV2, SqliteDatabaseV2_0_0};

-pub async fn transfer_categories(source_database: Arc<SqliteDatabaseV1_0_0>, dest_database: Arc<SqliteDatabaseV2_0_0>) {
+pub async fn transfer_categories(source_database: Arc<SqliteDatabaseV1_0_0>, target_database: Arc<SqliteDatabaseV2_0_0>) {
     println!("Transferring categories ...");

     let source_categories = source_database.get_categories_order_by_id().await.unwrap();
     println!("[v1] categories: {:?}", &source_categories);

-    let result = dest_database.reset_categories_sequence().await.unwrap();
+    let result = target_database.reset_categories_sequence().await.unwrap();
     println!("[v2] reset categories sequence result: {:?}", result);

     for cat in &source_categories {
         println!("[v2] adding category {:?} with id {:?} ...", &cat.name, &cat.category_id);
-        let id = dest_database
+        let id = target_database
             .insert_category(&CategoryRecordV2 {
                 category_id: cat.category_id,
                 name: cat.name.clone(),
@@ -23,12 +23,15 @@ pub async fn transfer_categories(source_database: Arc<SqliteDatabaseV1_0_0>, des
             .unwrap();

         if id != cat.category_id {
-            panic!("Error copying category {:?} from source DB to destiny DB", &cat.category_id);
+            panic!(
+                "Error copying category {:?} from source DB to the target DB",
+                &cat.category_id
+            );
         }

         println!("[v2] category: {:?} {:?} added.", id, &cat.name);
     }

-    let dest_categories = dest_database.get_categories().await.unwrap();
-    println!("[v2] categories: {:?}", &dest_categories);
+    let target_categories = target_database.get_categories().await.unwrap();
+    println!("[v2] categories: {:?}", &target_categories);
 }
src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/torrent_transferrer.rs
@@ -8,7 +8,7 @@ use crate::utils::parse_torrent::decode_torrent;

 pub async fn transfer_torrents(
     source_database: Arc<SqliteDatabaseV1_0_0>,
-    dest_database: Arc<SqliteDatabaseV2_0_0>,
+    target_database: Arc<SqliteDatabaseV2_0_0>,
     upload_path: &str,
 ) {
     println!("Transferring torrents ...");
@@ -47,13 +47,16 @@ pub async fn transfer_torrents(

         let torrent_from_file = torrent_from_file_result.unwrap();

-        let id = dest_database
+        let id = target_database
             .insert_torrent(&TorrentRecordV2::from_v1_data(torrent, &torrent_from_file.info, &uploader))
             .await
             .unwrap();

         if id != torrent.torrent_id {
-            panic!("Error copying torrent {:?} from source DB to destiny DB", &torrent.torrent_id);
+            panic!(
+                "Error copying torrent {:?} from source DB to the target DB",
+                &torrent.torrent_id
+            );
         }

         println!("[v2][torrust_torrents] torrent with id {:?} added.", &torrent.torrent_id);
@@ -72,7 +75,7 @@ pub async fn transfer_torrents(
                 &torrent_from_file.info.name, &torrent_from_file.info.length,
             );

-            let file_id = dest_database
+            let file_id = target_database
                 .insert_torrent_file_for_torrent_with_one_file(
                     torrent.torrent_id,
                     // TODO: it seems med5sum can be None. Why? When?
@@ -95,7 +98,7 @@ pub async fn transfer_torrents(
                     &file
                 );

-                let file_id = dest_database
+                let file_id = target_database
                     .insert_torrent_file_for_torrent_with_multiple_files(torrent, file)
                     .await;

@@ -113,7 +116,7 @@ pub async fn transfer_torrents(
             &torrent.torrent_id
         );

-        let id = dest_database.insert_torrent_info(torrent).await;
+        let id = target_database.insert_torrent_info(torrent).await;

         println!("[v2][torrust_torrents] torrent info insert result: {:?}.", &id);

@@ -147,7 +150,7 @@ pub async fn transfer_torrents(
                 &torrent.torrent_id
             );

-            let announce_url_id = dest_database
+            let announce_url_id = target_database
                 .insert_torrent_announce_url(torrent.torrent_id, tracker_url)
                 .await;

@@ -162,7 +165,7 @@ pub async fn transfer_torrents(
             &torrent.torrent_id
         );

-        let announce_url_id = dest_database
+        let announce_url_id = target_database
             .insert_torrent_announce_url(torrent.torrent_id, &torrent_from_file.announce.unwrap())
             .await;

src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/tracker_key_transferrer.rs
@@ -3,7 +3,7 @@ use std::sync::Arc;
 use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v1_0_0::SqliteDatabaseV1_0_0;
 use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteDatabaseV2_0_0;

-pub async fn transfer_tracker_keys(source_database: Arc<SqliteDatabaseV1_0_0>, dest_database: Arc<SqliteDatabaseV2_0_0>) {
+pub async fn transfer_tracker_keys(source_database: Arc<SqliteDatabaseV1_0_0>, target_database: Arc<SqliteDatabaseV2_0_0>) {
     println!("Transferring tracker keys ...");

     // Transfer table `torrust_tracker_keys`
@@ -18,7 +18,7 @@ pub async fn transfer_tracker_keys(source_database: Arc<SqliteDatabaseV1_0_0>, d
             &tracker_key.key_id
         );

-        let id = dest_database
+        let id = target_database
             .insert_tracker_key(
                 tracker_key.key_id,
                 tracker_key.user_id,
@@ -30,7 +30,7 @@ pub async fn transfer_tracker_keys(source_database: Arc<SqliteDatabaseV1_0_0>, d

         if id != tracker_key.key_id {
             panic!(
-                "Error copying tracker key {:?} from source DB to destiny DB",
+                "Error copying tracker key {:?} from source DB to the target DB",
                 &tracker_key.key_id
             );
         }
src/upgrades/from_v1_0_0_to_v2_0_0/transferrers/user_transferrer.rs
@@ -5,7 +5,7 @@ use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::sqlite_v2_0_0::SqliteData

 pub async fn transfer_users(
     source_database: Arc<SqliteDatabaseV1_0_0>,
-    dest_database: Arc<SqliteDatabaseV2_0_0>,
+    target_database: Arc<SqliteDatabaseV2_0_0>,
     date_imported: &str,
 ) {
     println!("Transferring users ...");
@@ -22,13 +22,13 @@ pub async fn transfer_users(
             &user.username, &user.user_id
         );

-        let id = dest_database
+        let id = target_database
             .insert_imported_user(user.user_id, date_imported, user.administrator)
             .await
             .unwrap();

         if id != user.user_id {
-            panic!("Error copying user {:?} from source DB to destiny DB", &user.user_id);
+            panic!("Error copying user {:?} from source DB to the target DB", &user.user_id);
         }

         println!("[v2][torrust_users] user: {:?} {:?} added.", &user.user_id, &user.username);
@@ -40,7 +40,7 @@ pub async fn transfer_users(
             &user.username, &user.user_id
         );

-        dest_database
+        target_database
             .insert_user_profile(user.user_id, &user.username, &user.email, user.email_verified)
             .await
             .unwrap();
@@ -57,7 +57,7 @@ pub async fn transfer_users(
             &user.password, &user.user_id
         );

-        dest_database
+        target_database
             .insert_user_password_hash(user.user_id, &user.password)
             .await
             .unwrap();
28 changes: 14 additions & 14 deletions src/upgrades/from_v1_0_0_to_v2_0_0/upgrader.rs
@@ -17,7 +17,7 @@ use std::time::SystemTime;
 use chrono::prelude::{DateTime, Utc};
 use text_colorizer::*;

-use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::{current_db, migrate_destiny_database, new_db, reset_destiny_database};
+use crate::upgrades::from_v1_0_0_to_v2_0_0::databases::{current_db, migrate_target_database, new_db, reset_target_database};
 use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::category_transferrer::transfer_categories;
 use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::torrent_transferrer::transfer_torrents;
 use crate::upgrades::from_v1_0_0_to_v2_0_0::transferrers::tracker_key_transferrer::transfer_tracker_keys;
@@ -27,9 +27,9 @@ const NUMBER_OF_ARGUMENTS: i64 = 3;

 #[derive(Debug)]
 pub struct Arguments {
-    pub source_database_file: String,  // The source database in version v1.0.0 we want to migrate
-    pub destiny_database_file: String, // The new migrated database in version v2.0.0
-    pub upload_path: String,           // The relative dir where torrent files are stored
+    pub source_database_file: String, // The source database in version v1.0.0 we want to migrate
+    pub target_database_file: String, // The new migrated database in version v2.0.0
+    pub upload_path: String,          // The relative dir where torrent files are stored
 }

 fn print_usage() {
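
The Arguments struct above is normally filled in by parse_args (next hunk) from the three positional command-line arguments. A minimal sketch (not part of this commit) of building it directly and driving the upgrade with the renamed field; the paths, the timestamp, and the calling function are made up, and it assumes the upgrader module is publicly reachable as in the imports shown earlier:

use crate::upgrades::from_v1_0_0_to_v2_0_0::upgrader::{upgrade, Arguments};

// Hypothetical call site: values are illustrative only.
async fn run_example_upgrade() {
    let args = Arguments {
        source_database_file: "./data.db".to_string(),    // existing v1.0.0 database
        target_database_file: "./data_v2.db".to_string(), // v2.0.0 database to be created
        upload_path: "./uploads".to_string(),             // dir containing the .torrent files
    };
    // `date_imported` is the timestamp recorded for imported users (see transfer_users).
    upgrade(&args, "2022-11-30 00:00:00").await;
}
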
@@ -62,7 +62,7 @@ fn parse_args() -> Arguments {

     Arguments {
         source_database_file: args[0].clone(),
-        destiny_database_file: args[1].clone(),
+        target_database_file: args[1].clone(),
         upload_path: args[2].clone(),
     }
 }
@@ -73,21 +73,21 @@ pub async fn run_upgrader() {
 }

 pub async fn upgrade(args: &Arguments, date_imported: &str) {
-    // Get connection to source database (current DB in settings)
+    // Get connection to the source database (current DB in settings)
     let source_database = current_db(&args.source_database_file).await;

-    // Get connection to destiny database
-    let dest_database = new_db(&args.destiny_database_file).await;
+    // Get connection to the target database (new DB we want to migrate the data)
+    let target_database = new_db(&args.target_database_file).await;

     println!("Upgrading data from version v1.0.0 to v2.0.0 ...");

-    migrate_destiny_database(dest_database.clone()).await;
-    reset_destiny_database(dest_database.clone()).await;
+    migrate_target_database(target_database.clone()).await;
+    reset_target_database(target_database.clone()).await;

-    transfer_categories(source_database.clone(), dest_database.clone()).await;
-    transfer_users(source_database.clone(), dest_database.clone(), date_imported).await;
-    transfer_tracker_keys(source_database.clone(), dest_database.clone()).await;
-    transfer_torrents(source_database.clone(), dest_database.clone(), &args.upload_path).await;
+    transfer_categories(source_database.clone(), target_database.clone()).await;
+    transfer_users(source_database.clone(), target_database.clone(), date_imported).await;
+    transfer_tracker_keys(source_database.clone(), target_database.clone()).await;
+    transfer_torrents(source_database.clone(), target_database.clone(), &args.upload_path).await;
 }

 /// Current datetime in ISO8601 without time zone.
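
The body of the datetime helper documented above ("Current datetime in ISO8601 without time zone") sits below the collapsed context, but the imports in this hunk (SystemTime plus chrono's DateTime and Utc) suggest how such a value can be produced. A minimal sketch, with an assumed function name and format string:

use std::time::SystemTime;

use chrono::prelude::{DateTime, Utc};

// Hypothetical helper: ISO 8601-style timestamp without a time zone suffix,
// e.g. "2022-11-30 10:35:00". Name and format are assumptions for illustration.
fn current_datetime_without_timezone() -> String {
    let now: DateTime<Utc> = SystemTime::now().into();
    now.format("%Y-%m-%d %H:%M:%S").to_string()
}
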
10 changes: 5 additions & 5 deletions tests/upgrades/from_v1_0_0_to_v2_0_0/testers/category_tester.rs
@@ -7,7 +7,7 @@ use crate::upgrades::from_v1_0_0_to_v2_0_0::sqlite_v2_0_0::SqliteDatabaseV2_0_0;

 pub struct CategoryTester {
     source_database: Arc<SqliteDatabaseV1_0_0>,
-    destiny_database: Arc<SqliteDatabaseV2_0_0>,
+    target_database: Arc<SqliteDatabaseV2_0_0>,
     test_data: TestData,
 }

@@ -16,7 +16,7 @@ pub struct TestData {
 }

 impl CategoryTester {
-    pub fn new(source_database: Arc<SqliteDatabaseV1_0_0>, destiny_database: Arc<SqliteDatabaseV2_0_0>) -> Self {
+    pub fn new(source_database: Arc<SqliteDatabaseV1_0_0>, target_database: Arc<SqliteDatabaseV2_0_0>) -> Self {
         let category_01 = CategoryRecordV1 {
             category_id: 10,
             name: "category name 10".to_string(),
@@ -28,7 +28,7 @@ impl CategoryTester {

         Self {
             source_database,
-            destiny_database,
+            target_database,
             test_data: TestData {
                 categories: vec![category_01, category_02],
             },
@@ -51,9 +51,9 @@ impl CategoryTester {
     }

     /// Table `torrust_categories`
-    pub async fn assert_data_in_destiny_db(&self) {
+    pub async fn assert_data_in_target_db(&self) {
         for categories in &self.test_data.categories {
-            let imported_category = self.destiny_database.get_category(categories.category_id).await.unwrap();
+            let imported_category = self.target_database.get_category(categories.category_id).await.unwrap();

             assert_eq!(imported_category.category_id, categories.category_id);
             assert_eq!(imported_category.name, categories.name);
16 changes: 8 additions & 8 deletions tests/upgrades/from_v1_0_0_to_v2_0_0/testers/torrent_tester.rs
@@ -10,7 +10,7 @@ use crate::upgrades::from_v1_0_0_to_v2_0_0::sqlite_v2_0_0::SqliteDatabaseV2_0_0;

 pub struct TorrentTester {
     source_database: Arc<SqliteDatabaseV1_0_0>,
-    destiny_database: Arc<SqliteDatabaseV2_0_0>,
+    target_database: Arc<SqliteDatabaseV2_0_0>,
     test_data: TestData,
 }

@@ -22,7 +22,7 @@ pub struct TestData {
 impl TorrentTester {
     pub fn new(
         source_database: Arc<SqliteDatabaseV1_0_0>,
-        destiny_database: Arc<SqliteDatabaseV2_0_0>,
+        target_database: Arc<SqliteDatabaseV2_0_0>,
         user: &UserRecordV1,
         category_id: i64,
     ) -> Self {
@@ -69,7 +69,7 @@ impl TorrentTester {

         Self {
             source_database,
-            destiny_database,
+            target_database,
             test_data: TestData {
                 torrents: vec![torrent_01, torrent_02],
                 user: user.clone(),
@@ -83,7 +83,7 @@ impl TorrentTester {
         }
     }

-    pub async fn assert_data_in_destiny_db(&self, upload_path: &str) {
+    pub async fn assert_data_in_target_db(&self, upload_path: &str) {
         for torrent in &self.test_data.torrents {
             let filepath = self.torrent_file_path(upload_path, torrent.torrent_id);

@@ -102,7 +102,7 @@ impl TorrentTester {

     /// Table `torrust_torrents`
     async fn assert_torrent(&self, torrent: &TorrentRecordV1, torrent_file: &Torrent) {
-        let imported_torrent = self.destiny_database.get_torrent(torrent.torrent_id).await.unwrap();
+        let imported_torrent = self.target_database.get_torrent(torrent.torrent_id).await.unwrap();

         assert_eq!(imported_torrent.torrent_id, torrent.torrent_id);
         assert_eq!(imported_torrent.uploader_id, self.test_data.user.user_id);
@@ -126,7 +126,7 @@ impl TorrentTester {

     /// Table `torrust_torrent_info`
     async fn assert_torrent_info(&self, torrent: &TorrentRecordV1) {
-        let torrent_info = self.destiny_database.get_torrent_info(torrent.torrent_id).await.unwrap();
+        let torrent_info = self.target_database.get_torrent_info(torrent.torrent_id).await.unwrap();

         assert_eq!(torrent_info.torrent_id, torrent.torrent_id);
         assert_eq!(torrent_info.title, torrent.title);
@@ -136,7 +136,7 @@ impl TorrentTester {
     /// Table `torrust_torrent_announce_urls`
     async fn assert_torrent_announce_urls(&self, torrent: &TorrentRecordV1, torrent_file: &Torrent) {
         let torrent_announce_urls = self
-            .destiny_database
+            .target_database
             .get_torrent_announce_urls(torrent.torrent_id)
             .await
             .unwrap();
@@ -153,7 +153,7 @@ impl TorrentTester {

     /// Table `torrust_torrent_files`
     async fn assert_torrent_files(&self, torrent: &TorrentRecordV1, torrent_file: &Torrent) {
-        let db_torrent_files = self.destiny_database.get_torrent_files(torrent.torrent_id).await.unwrap();
+        let db_torrent_files = self.target_database.get_torrent_files(torrent.torrent_id).await.unwrap();

         if torrent_file.is_a_single_file_torrent() {
             let db_torrent_file = &db_torrent_files[0];