A Python tool that automatically backs up your important files to another folder, external drive, or cloud folder (Google Drive/OneDrive).
🎯 What This Backup Script Can Do
✅ Automatically copies all files from source → backup folder
✅ Creates a new backup folder with date (optional)
✅ Logs which files were copied
✅ Can run daily/weekly using a scheduler
✅ Supports multiple source folders
✅ Avoids copying duplicates
✅ Shows a backup summary
✅ Works on Windows, Mac, Linux
🧰 Python Modules Used
shutil — copying files
os — file paths
datetime — timestamp folders
hashlib — prevents duplicate backups
schedule — automatic scheduled backups
All are built-in (no installation needed), except schedule (optional):
pip install schedule
📁 Folder Structure
AutoBackup/
├── backup.py
├── config.json
├── logs.txt
└── backups/
📄 config.json Example
{
"source_paths": [
"C:/Users/Sameer/Documents",
"C:/Users/Sameer/Desktop/Projects"
],
"backup_path": "D:/Backups",
"create_daily_folder": true
}
🧩 FULL WORKING BACKUP SCRIPT (backup.py)
import os
import shutil
import json
from datetime import datetime
import hashlib

# Load config: expects "source_paths" (list of folders to back up),
# "backup_path" (destination folder), and "create_daily_folder" (bool).
# `with` guarantees the config file handle is closed after parsing.
with open("config.json") as cfg:
    config = json.load(cfg)

SOURCE_PATHS = config["source_paths"]
BACKUP_PATH = config["backup_path"]
DAILY_FOLDER = config["create_daily_folder"]

# Optionally nest today's backup under a YYYY-MM-DD subfolder so each
# day gets its own snapshot directory.
if DAILY_FOLDER:
    today = datetime.now().strftime("%Y-%m-%d")
    BACKUP_PATH = os.path.join(BACKUP_PATH, today)

# Ensure the destination exists in BOTH modes — the original only
# guaranteed creation when the daily folder was enabled.
os.makedirs(BACKUP_PATH, exist_ok=True)
def file_hash(path):
    """Return the MD5 hex digest of the file at *path*.

    Used only to detect duplicate content between source and backup,
    not for security purposes, so MD5 is acceptable here.
    """
    hasher = hashlib.md5()
    # Read in fixed-size chunks instead of f.read() so that very large
    # files do not have to fit entirely in memory.
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
def backup_files():
    """Copy files from every SOURCE_PATHS folder into BACKUP_PATH.

    A file is skipped when a file with identical content (same MD5
    hash) already exists anywhere under BACKUP_PATH. Copied files are
    appended to logs.txt and a short summary is printed.
    """
    logs = []
    existing_hashes = set()

    # Index the content hashes of everything already in the backup.
    for root, dirs, files in os.walk(BACKUP_PATH):
        for file in files:
            existing_hashes.add(file_hash(os.path.join(root, file)))

    for src in SOURCE_PATHS:
        for root, dirs, files in os.walk(src):
            for file in files:
                src_file = os.path.join(root, file)
                digest = file_hash(src_file)
                # Skip files whose content is already backed up.
                if digest in existing_hashes:
                    continue
                # Mirror the source's relative layout under BACKUP_PATH.
                rel_path = os.path.relpath(root, src)
                dest_dir = os.path.join(BACKUP_PATH, rel_path)
                os.makedirs(dest_dir, exist_ok=True)
                dst_file = os.path.join(dest_dir, file)
                shutil.copy2(src_file, dst_file)  # copy2 preserves metadata
                # Record the new hash so identical files encountered later
                # in this same run are also treated as duplicates (the
                # original never updated the set, copying them all).
                existing_hashes.add(digest)
                logs.append(f"Copied: {src_file} -> {dst_file}")

    # Only touch the log file when something was actually copied;
    # the original appended a stray blank line on empty runs.
    if logs:
        with open("logs.txt", "a") as f:
            f.write("\n".join(logs) + "\n")

    print("Backup completed.")
    print(f"Total files copied: {len(logs)}")
# Run one backup pass immediately when executed as a script
# (importing this module elsewhere does not trigger a backup).
if __name__ == "__main__":
    backup_files()
▶ Run Manually
python backup.py
⏲️ Auto Backup Every Day
Create: schedule_backup.py
import schedule
import time
import subprocess
import sys


def run_backup():
    """Launch backup.py as a child process of the current interpreter."""
    # subprocess.run with an argument list avoids shell quoting issues,
    # and sys.executable guarantees the same Python that runs this
    # scheduler is used (os.system("python ...") may pick a different
    # or missing interpreter on some systems).
    subprocess.run([sys.executable, "backup.py"], check=False)


# Trigger one backup every day at 09:00 local time.
schedule.every().day.at("09:00").do(run_backup)

# Poll the scheduler once per second, forever.
while True:
    schedule.run_pending()
    time.sleep(1)
Runs backup every day at 9 AM.

Leave a Reply