diff --git a/backup.py b/backup.py
index 591d041..48a00bb 100755
--- a/backup.py
+++ b/backup.py
@@ -3,6 +3,9 @@
 import sys
 import os
 from datetime import date
+import concurrent.futures
+import time
+import logging
 
 
 def validate_json(data):
@@ -19,28 +22,46 @@ def get_encrypt_str(gpg_email, output_file_name):
 def get_curl_str(encrypt_str, link, bucket_name, output_file_name):
     return f'curl {link} {encrypt_str} | aws s3 cp - "s3://{bucket_name}/{output_file_name}" --storage-class DEEP_ARCHIVE'
 
+def save_file(curl_str):
+    os.system(curl_str)
+
 def save_files(type, fmt, download_links, gpg_email, bucket_name):
-    cmd_used = ""
+    cmds_to_run = []
     for count, link in enumerate(download_links):
-        file_name=f"{type}_{date.today()}_{count}.{fmt}"
+        file_name = f"{type}_{date.today()}_{count}.{fmt}"
         encryptStr = get_encrypt_str(gpg_email, file_name)
         curl_str = get_curl_str(encryptStr, link, bucket_name, file_name)
-        os.system(curl_str)
-        cmd_used = f"{cmd_used} && {curl_str}"
-    return cmd_used[4:]  # "4" removes the first " && " from the command str
+        cmds_to_run.append(curl_str)
+    return cmds_to_run
 
 def main():
-    os.system("chmod +x ./mount_hdd.sh && sudo ./mount_hdd.sh")
+    mnt_hdd = "mount_hdd.sh"
+    if os.path.isfile(mnt_hdd):
+        os.system(f"chmod +x {mnt_hdd} && sudo {mnt_hdd}")
+
     file = open("data.json")
     data = json.load(file)
     validate_json(data)
-    cmd_str_google = save_files("google", data.get("google_fmt"), data.get("google_links"), data.get("gpg_email"), data.get("bucket_name"))
-    cmd_str_facebook = save_files("facebook", data.get("facebook_fmt"), data.get("facebook_links"), data.get("gpg_email"), data.get("bucket_name"))
+    cmds_to_run = []
+
+    cmd_google = save_files("google", data.get("google_fmt"), data.get("google_links"), data.get("gpg_email"), data.get("bucket_name"))
+    cmds_to_run.extend(cmd_google)
+
+    cmd_facebook = save_files("facebook", data.get("facebook_fmt"), data.get("facebook_links"), data.get("gpg_email"), data.get("bucket_name"))
+    cmds_to_run.extend(cmd_facebook)
+
+    with concurrent.futures.ThreadPoolExecutor() as executor:
+        [executor.submit(save_file, param) for param in cmds_to_run]
+
     return {
-        "google": cmd_str_google,
-        "facebook": cmd_str_facebook
+        "google": cmd_google,
+        "facebook": cmd_facebook
     }
 
 
 if __name__ == "__main__":
+
+    format = "%(asctime)s: %(message)s"
+    logging.basicConfig(format=format, level=logging.INFO,
+                        datefmt="%H:%M:%S")
     print(main())
diff --git a/backup_test.py b/backup_test.py
index 4d1e168..3aceac1 100755
--- a/backup_test.py
+++ b/backup_test.py
@@ -31,20 +31,19 @@ def test_get_encrypt_str(self):
 
     @patch('backup.get_encrypt_str')
     @patch('backup.get_curl_str')
-    @patch('os.system')
-    def test_save_files(self, mock_sys, mock_get_curl_str, mock_get_encrypt_str):
-        mock_sys.return_value = ""
+    def test_save_files(self, mock_get_curl_str, mock_get_encrypt_str):
         mock_get_encrypt_str.return_value = ""
         mock_get_curl_str.return_value = "curl_str"
         actual = backup.save_files("google", "tgz", ["1", "2"], "test@gmail.com", "bucket_name")
-        expected = "curl_str && curl_str"
+        expected = ["curl_str", "curl_str"]
         self.assertEqual(actual, expected)
 
 
     @patch("json.load")
     @patch("builtins.open")
+    @patch("os.path.isfile")
     @patch('os.system')
-    def test_integration_test(self, mock_sys, mock_open, mock_json_load):
+    def test_integration_test(self, mock_sys, mock_isfile, mock_open, mock_json_load):
         data = """{
             "bucket_name": "bucket_name",
             "gpg_email": "gpg@gmail.com",
@@ -56,12 +55,13 @@ def test_integration_test(self, mock_sys, mock_open, mock_json_load):
 
         mock_sys.return_value = ""
         mock_open.return_value = ""
+        mock_isfile.return_value = False
         mock_json_load.return_value = json.loads(data)
         actual = backup.main()
 
         expected = {
-            "google": f'curl www.google.com/0 | gpg --encrypt -r gpg@gmail.com --trust-model always --output google_{date.today()}_0.tgz | aws s3 cp - "s3://bucket_name/google_{date.today()}_0.tgz" --storage-class DEEP_ARCHIVE && curl www.google.com/1 | gpg --encrypt -r gpg@gmail.com --trust-model always --output google_{date.today()}_1.tgz | aws s3 cp - "s3://bucket_name/google_{date.today()}_1.tgz" --storage-class DEEP_ARCHIVE',  # noqa: E501
-            "facebook": f'curl www.facebook.com/0 | gpg --encrypt -r gpg@gmail.com --trust-model always --output facebook_{date.today()}_0.zip | aws s3 cp - "s3://bucket_name/facebook_{date.today()}_0.zip" --storage-class DEEP_ARCHIVE && curl www.facebook.com/1 | gpg --encrypt -r gpg@gmail.com --trust-model always --output facebook_{date.today()}_1.zip | aws s3 cp - "s3://bucket_name/facebook_{date.today()}_1.zip" --storage-class DEEP_ARCHIVE'  # noqa: E501
+            "google": [f'curl www.google.com/0 | gpg --encrypt -r gpg@gmail.com --trust-model always --output google_{date.today()}_0.tgz | aws s3 cp - "s3://bucket_name/google_{date.today()}_0.tgz" --storage-class DEEP_ARCHIVE', f'curl www.google.com/1 | gpg --encrypt -r gpg@gmail.com --trust-model always --output google_{date.today()}_1.tgz | aws s3 cp - "s3://bucket_name/google_{date.today()}_1.tgz" --storage-class DEEP_ARCHIVE'],  # noqa: E501
+            "facebook": [f'curl www.facebook.com/0 | gpg --encrypt -r gpg@gmail.com --trust-model always --output facebook_{date.today()}_0.zip | aws s3 cp - "s3://bucket_name/facebook_{date.today()}_0.zip" --storage-class DEEP_ARCHIVE', f'curl www.facebook.com/1 | gpg --encrypt -r gpg@gmail.com --trust-model always --output facebook_{date.today()}_1.zip | aws s3 cp - "s3://bucket_name/facebook_{date.today()}_1.zip" --storage-class DEEP_ARCHIVE']  # noqa: E501
         }
         self.assertEqual(actual, expected)
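
Note on the fan-out in main(): the throwaway list comprehension around executor.submit(save_file, ...) discards the Future objects, so a failed upload passes silently (os.system's return code is never checked), even though the ThreadPoolExecutor context manager does block until every command finishes. A minimal sketch of the same pattern that keeps the futures and logs non-zero exit codes is below; run_cmd, run_all, and the sample commands are illustrative, not part of this PR.

# Sketch only: same thread-pool fan-out as main(), but futures are kept so
# failures surface. run_cmd, run_all, and the sample commands are hypothetical.
import concurrent.futures
import logging
import subprocess

logging.basicConfig(format="%(asctime)s: %(message)s",
                    level=logging.INFO, datefmt="%H:%M:%S")


def run_cmd(cmd):
    # shell=True because each command is a `curl | gpg | aws` pipeline;
    # the return code reported is that of the last command in the pipeline.
    return cmd, subprocess.run(cmd, shell=True).returncode


def run_all(cmds):
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(run_cmd, cmd) for cmd in cmds]
        # as_completed yields each future as it finishes, so errors are
        # reported immediately rather than being silently dropped.
        for future in concurrent.futures.as_completed(futures):
            cmd, code = future.result()
            if code == 0:
                logging.info("done: %s", cmd)
            else:
                logging.error("exit %s: %s", code, cmd)


if __name__ == "__main__":
    run_all(["echo one", "echo two", "false"])  # placeholder commands

Threads (rather than processes) fit here because each task just blocks on an external subprocess, so the work is I/O-bound.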