Skip to content

Commit

Permalink
refactor backup (#28)
Browse files Browse the repository at this point in the history
* change file name

* refactor backup files

* update unit tests

* format source code
  • Loading branch information
jabbar-gabbar authored Jan 23, 2023
1 parent fbbfffb commit 4eeff7f
Show file tree
Hide file tree
Showing 5 changed files with 287 additions and 150 deletions.
108 changes: 108 additions & 0 deletions src/backup.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
use log::{error, info, log_enabled, Level};

use crate::{
aws_s3::AwsS3,
file_backup, filter,
inventory::{self, InventoryPath},
prepare_upload,
settings::Settings,
source::{self, SourceDir},
};

/// Executes every backup entry configured in `settings`.
///
/// Each entry is processed independently: the inventory file and the
/// source directory are listed, excluded extensions are filtered out,
/// the upload set is prepared, and `file_backup::backup` performs the
/// uploads. An entry whose inventory or source listing fails is
/// reported via `log_error` and skipped; remaining entries still run.
pub async fn run(settings: Settings, aws_s3: &dyn AwsS3) {
    for cfg in settings.backup {
        // Announce the entry up front so any failures below have context.
        if log_enabled!(Level::Info) {
            log_backing_up(
                cfg.source_directory_path(),
                cfg.s3_bucket(),
                cfg.inventory_file_path(),
            );
        }

        let inventory_path = InventoryPath {
            path: cfg.inventory_file_path().to_string(),
        };

        // Previously recorded object keys; a read failure skips this entry.
        let inventory = match inventory::list(&inventory_path) {
            Err(e) => {
                log_error(
                    &format!(
                        "Could not read inventory file at {}",
                        cfg.inventory_file_path()
                    ),
                    &e.to_string(),
                );
                continue;
            }
            Ok(existing) => existing,
        };

        let dir = SourceDir {
            dir_path: cfg.source_directory_path().to_string(),
        };

        // Files currently on disk; a read failure skips this entry.
        let source_files = match source::list(&dir) {
            Err(e) => {
                log_error(
                    &format!(
                        "Could not read source dir at {}",
                        cfg.source_directory_path()
                    ),
                    &e.to_string(),
                );
                continue;
            }
            Ok(files) => files,
        };

        // Drop excluded extensions, then diff against the inventory to
        // decide what still needs uploading.
        let kept = filter::filter(cfg.excluded_extensions(), source_files);
        let to_upload = prepare_upload::prepare(&kept, &inventory, cfg.source_directory_path());

        let uploaded_count =
            file_backup::backup(aws_s3, &to_upload, cfg.s3_bucket(), &inventory_path).await;

        if log_enabled!(Level::Info) {
            log_metric(
                cfg.source_directory_path(),
                cfg.s3_bucket(),
                kept.len(),
                inventory.len(),
                to_upload.len(),
                uploaded_count,
            );
        }
    }

    if log_enabled!(Level::Info) {
        info!("Backup complete!");
    }
}

/// Logs the banner shown before one backup entry is processed.
fn log_backing_up(dir: &str, bucket: &str, inventory: &str) {
    // Two identical separator records give the banner breathing room.
    for _ in 0..2 {
        info!("----\n");
    }
    info!("---- Backing up dir: {} --> bucket: {} ----", dir, bucket);
    info!("---- Using inventory file : {}", inventory);
}

/// Logs the per-entry summary after a backup pass: the directory and
/// bucket involved, plus the counts at each pipeline stage.
///
/// `source` is the filtered source-file count, `inv` the inventory-entry
/// count, `prepared` the number of items selected for upload, and
/// `uploaded` the number actually uploaded and recorded.
fn log_metric(
    dir: &str,
    bucket: &str,
    source: usize,
    inv: usize,
    prepared: usize,
    uploaded: usize,
) {
    info!("---- backed up: {} ==> bucket: {} ----", dir, bucket);
    info!(
        "---- source: {}, inventory: {}, prepared: {}, uploaded: {} ----",
        source, inv, prepared, uploaded
    );
}

/// Log custom error
fn log_error(msg: &str, err: &str) {
error!("{}", msg);
error!("{}", err);
}
232 changes: 144 additions & 88 deletions src/file_backup.rs
Original file line number Diff line number Diff line change
@@ -1,107 +1,163 @@
use log::{error, info, log_enabled, Level};

use crate::{
aws_s3::AwsS3,
filter,
inventory::{self, InventoryPath},
prepare_upload,
settings::Settings,
source::{self, SourceDir},
inventory::{self, Append},
prepare_upload::UploadItem,
uploader,
};
use log::error;

pub async fn run(settings: Settings, aws_s3: &dyn AwsS3) {
for backup in settings.backup {
if log_enabled!(Level::Info) {
log_backing_up(
backup.source_directory_path(),
backup.s3_bucket(),
backup.inventory_file_path(),
);
/// Uploads each prepared item to `s3_bucket` and records every successful
/// upload in the inventory via `append_impl`.
///
/// Returns the number of items that were both uploaded and appended to
/// the inventory. An item whose upload fails is simply not counted; an
/// item that uploads but cannot be appended is logged here and not
/// counted, so it can be retried on a later run.
pub async fn backup(
    aws_s3: &dyn AwsS3,
    // `&[UploadItem]` instead of `&Vec<UploadItem>` (clippy `ptr_arg`);
    // existing `&vec` call sites still coerce.
    uploads: &[UploadItem],
    s3_bucket: &str,
    append_impl: &dyn Append,
) -> usize {
    let mut count = 0;
    for upload in uploads {
        if uploader::upload_one(aws_s3, upload, s3_bucket).await {
            match inventory::append_one(append_impl, &upload.object_key_name()) {
                Ok(_) => {
                    count += 1;
                }
                Err(e) => {
                    // Report the key we tried to append — the value passed
                    // to append_one — not the local file path.
                    error!(
                        "Could not append inventory file {} with {}",
                        append_impl.get_path(),
                        &upload.object_key_name()
                    );
                    error!("{}", e);
                }
            }
        }
    }
    count
}

let inv_path = InventoryPath {
path: backup.inventory_file_path().to_string(),
};

let inventory = match inventory::list(&inv_path) {
Ok(inv) => inv,
Err(e) => {
log_error(
&format!(
"Could not read inventory file at {}",
backup.inventory_file_path()
),
&e.to_string(),
);
continue;
}
};

let source_dir = SourceDir {
dir_path: backup.source_directory_path().to_string(),
};

let source = match source::list(&source_dir) {
Ok(src) => src,
Err(e) => {
log_error(
&format!(
"Could not read source dir at {}",
backup.source_directory_path()
),
&e.to_string(),
);
continue;
}
};
#[cfg(test)]
mod tests {
use std::{
collections::HashMap,
io::{Error, ErrorKind},
};

use async_trait::async_trait;

use crate::{
aws_s3::AwsS3,
file_backup::backup,
inventory::{Append, Path},
prepare_upload::UploadItem,
};

#[tokio::test]
async fn backup_works_entirely() {
// Setup
let mut upload_items: Vec<UploadItem> = vec![];

let key_name = "key_name";
let upload_item = UploadItem::new("file_path".into(), key_name.to_string());
upload_items.push(upload_item);

let mut map: HashMap<String, bool> = HashMap::new();
map.insert(key_name.to_string(), true);

let filtered = filter::filter(backup.excluded_extensions(), source);
let mut map2: HashMap<String, bool> = HashMap::new();
map2.insert(format!("{}\n", key_name.to_string()), true);

let prepared =
prepare_upload::prepare(&filtered, &inventory, backup.source_directory_path());
let fake_aws_s3 = TestS3Client { map };
let fake_append = TestInventoryPath { map: map2 };

let uploaded = uploader::upload(aws_s3, &prepared, backup.s3_bucket(), &inv_path).await;
// Action
let uploaded = backup(&fake_aws_s3, &upload_items, "s3_bucket", &fake_append).await;

if log_enabled!(Level::Info) {
log_metric(
backup.source_directory_path(),
backup.s3_bucket(),
filtered.len(),
inventory.len(),
prepared.len(),
uploaded.len(),
)
// Test
assert_eq!(uploaded, 1);
}

#[tokio::test]
async fn backup_partially_works_when_s3_fails() {
    // Setup: two items; S3 succeeds for even keys and fails for odd
    // ones, while the inventory append always succeeds.
    let mut upload_items: Vec<UploadItem> = vec![];

    let mut s3_map: HashMap<String, bool> = HashMap::new();
    let mut append_map: HashMap<String, bool> = HashMap::new();

    for n in 0..2 {
        s3_map.insert(n.to_string(), n % 2 == 0); // s3 fails on odd n
        append_map.insert(format!("{}\n", n), true);

        let upload_item = UploadItem::new("file_path".into(), n.to_string());
        upload_items.push(upload_item);
    }

    let fake_aws_s3 = TestS3Client { map: s3_map };
    let fake_append = TestInventoryPath { map: append_map };

    // Action
    let uploaded = backup(&fake_aws_s3, &upload_items, "s3_bucket", &fake_append).await;

    // Test: only the n == 0 item is uploaded and counted.
    assert_eq!(uploaded, 1);
}

if log_enabled!(Level::Info) {
info!("Backup complete!");
#[tokio::test]
async fn backup_partially_works_when_inventory_append_fails() {
    // Setup: two items; S3 always succeeds, but the inventory append
    // succeeds only for even keys.
    let mut upload_items: Vec<UploadItem> = vec![];

    let mut s3_map: HashMap<String, bool> = HashMap::new();
    let mut append_map: HashMap<String, bool> = HashMap::new();

    for n in 0..2 {
        s3_map.insert(n.to_string(), true);
        append_map.insert(format!("{}\n", n), n % 2 == 0); // append fails on odd n

        let upload_item = UploadItem::new("file_path".into(), n.to_string());
        upload_items.push(upload_item);
    }

    let fake_aws_s3 = TestS3Client { map: s3_map };
    let fake_append = TestInventoryPath { map: append_map };

    // Action
    let uploaded = backup(&fake_aws_s3, &upload_items, "s3_bucket", &fake_append).await;

    // Test: only the n == 0 item is counted.
    assert_eq!(uploaded, 1);
}
}

fn log_backing_up(dir: &str, bucket: &str, inventory: &str) {
info!("---- Backing up dir: {} --> bucket: {} ----", dir, bucket);
info!("---- Using inventory file : {}", inventory);
}
/// Test double for `AwsS3`: `map` decides, per object key, whether
/// `put_object` reports success (`true`) or failure.
struct TestS3Client {
    map: HashMap<String, bool>,
}

fn log_metric(
dir: &str,
bucket: &str,
source: usize,
inv: usize,
prepared: usize,
uploaded: usize,
) {
info!("---- {} --> bucket: {} ----", dir, bucket);
info!(
"---- Metric source-filtered: {}, inventory: {}, prepared: {}, uploaded: {} ----",
source, inv, prepared, uploaded
);
}
#[async_trait]
impl AwsS3 for TestS3Client {
    /// Returns the canned outcome configured for `key`; keys that were
    /// never configured are treated as failed uploads.
    async fn put_object(&self, _bucket: &str, key: &str, _file: &str) -> bool {
        self.map.get(key).copied().unwrap_or(false)
    }
}

/// Test double for the inventory sink: `map` decides, per appended
/// content string, whether `append` succeeds (`true`) or errors.
struct TestInventoryPath {
    map: HashMap<String, bool>,
}

impl Path for TestInventoryPath {
    /// Returns a fixed dummy path. The code under test uses this value
    /// only as an argument to an error-log message, so any stable string
    /// works; returning one (instead of `todo!()`) prevents the
    /// failure-path tests from panicking when a logger is installed and
    /// the log macro actually evaluates its arguments.
    fn get_path(&self) -> String {
        "test_inventory_path".to_string()
    }
}

/// Log custom error
fn log_error(msg: &str, err: &str) {
error!("{}", msg);
error!("{}", err);
impl Append for TestInventoryPath {
    /// Succeeds only when `map` marks `new_content` as appendable;
    /// every other case yields an `Other`-kind I/O error.
    fn append(&self, new_content: &String) -> Result<(), Error> {
        match self.map.get(new_content.as_str()) {
            Some(true) => Ok(()),
            _ => Err(Error::new(ErrorKind::Other, "uh-oh")),
        }
    }
}
}
1 change: 1 addition & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod aws_s3;
pub mod backup;
pub mod file_backup;
pub mod filter;
pub mod inventory;
Expand Down
4 changes: 2 additions & 2 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use aws_sdk_s3::Client;
use log::error;
use spear::{aws_s3::S3Client, file_backup, settings::Settings};
use spear::{aws_s3::S3Client, backup, settings::Settings};
use std::process;

#[tokio::main]
Expand All @@ -16,5 +16,5 @@ async fn main() {
let client = Client::new(&config);
let s3_client = S3Client { s3: client };

file_backup::run(settings, &s3_client).await;
backup::run(settings, &s3_client).await;
}
Loading

0 comments on commit 4eeff7f

Please sign in to comment.