Losing data sucks! Here is how you can automatically back up your MySQL database to Amazon S3 at whatever interval you like, using a cron job (a sample crontab entry is sketched below, followed by the script itself). The script relies on the S3 PHP library from GitHub. The main beauty of this function is that it creates chronological folders on S3 (year/month/day), so you can easily browse through your MySQL database backups. I use the .gz extension to save some space, but you are welcome to edit the script to save plain .sql files as well. I believe this will be a complete lifesaver for a lot of people. Please let me know if you develop it further, or if you have any questions about the script.
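For the scheduling side, here is a minimal crontab sketch. The script filename, the PHP binary location, and the daily 03:00 schedule are all assumptions of mine; adjust them to your own setup:

# Dump and upload the database every day at 03:00 (example path and schedule)
0 3 * * * /usr/bin/php /var/www/yoursite/backup-db-s3.php > /dev/null 2>&1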
<?php
// Load WordPress so we can use its DB constants and wp_upload_dir().
include('../wp-load.php');

// Chronological folder structure on S3: year/month/day.
BackupDBOnS3("YourSiteDb-Backup/DB/".date('Y')."/".date('m')."/".date('d'));

function BackupDBOnS3($bucketName) {
    $user = DB_USER;
    $pass = DB_PASSWORD;
    $host = DB_HOST;
    $db   = DB_NAME;

    // Time-stamped, gzipped dump written to the WordPress uploads directory.
    $current_time = date('H.i.s', time());
    $file_name = "$db-$current_time.sql.gz";
    $upload_dir = wp_upload_dir();
    $output_file_path = $upload_dir['path']."/".$file_name;

    // Dump the database and gzip it in one step.
    `mysqldump --user={$user} --password={$pass} --host={$host} {$db} | gzip > {$output_file_path}`;

    if (!class_exists('S3')) require_once get_template_directory().'/S3.php';

    // AWS access info
    if (!defined('awsAccessKey')) define('awsAccessKey', 'PutYourAWSAccessKeyHere');
    if (!defined('awsSecretKey')) define('awsSecretKey', 'PutYourAWSSecretKeyHere');

    // Instantiate the class
    $s3 = new S3(awsAccessKey, awsSecretKey);

    // Upload the dump with a private ACL.
    if ($s3->putObjectFile($output_file_path, $bucketName, $file_name, S3::ACL_PRIVATE)) {
        print "$file_name : Successful!<br>";
    } else {
        echo "$file_name : Failed to copy file<br>";
    }

    // Remove the local copy once the upload attempt is done.
    unlink($output_file_path);
}
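If you would rather skip the gzip step and keep plain .sql dumps, as mentioned above, only two lines need to change; a minimal sketch:

$file_name = "$db-$current_time.sql"; // drop the .gz suffix
`mysqldump --user={$user} --password={$pass} --host={$host} {$db} > {$output_file_path}`;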