Automate Your Daily Tasks with Python: Practical Examples

Introduction
We all have repetitive tasks that consume valuable time in our daily workflows. Whether it's organizing files, pulling data from websites, processing spreadsheets, or sending routine emails, these mundane tasks can eat away at productivity and creative energy. This is where Python shines as an automation tool.
Python's simplicity, readability, and vast ecosystem of libraries make it the perfect language for automating everyday tasks, even for those with limited programming experience. By investing a little time up front to write automation scripts, you can save hours of manual work in the long run.
In this article, we'll explore practical examples of using Python to automate common tasks. Each example includes working code that you can adapt to your specific needs. Let's dive in and start reclaiming your time!
Automating File Organization
One of the simplest yet most useful automation tasks is organizing files. If you've ever had a downloads folder or desktop cluttered with various file types, you know how time-consuming manual organization can be.
Sorting Files by Type
This script automatically organizes files in a directory by moving them to subdirectories based on file extension:
import shutil
from pathlib import Path

def organize_directory(directory):
    # Dictionary mapping file extensions to folder names
    extensions_map = {
        # Images
        '.jpg': 'Images',
        '.jpeg': 'Images',
        '.png': 'Images',
        '.gif': 'Images',
        # Documents
        '.pdf': 'Documents',
        '.doc': 'Documents',
        '.docx': 'Documents',
        '.txt': 'Documents',
        # Audio
        '.mp3': 'Audio',
        '.wav': 'Audio',
        # Video
        '.mp4': 'Videos',
        '.mov': 'Videos',
        # Code
        '.py': 'Code',
        '.js': 'Code',
        '.html': 'Code',
        '.css': 'Code',
    }

    # Create directory path object
    directory_path = Path(directory)

    # Iterate through each file in the directory
    for file_path in directory_path.iterdir():
        # Skip if it's a directory
        if file_path.is_dir():
            continue

        # Get the file extension
        file_ext = file_path.suffix.lower()

        # Skip if we don't have a mapping for this extension
        if file_ext not in extensions_map:
            continue

        # Get the destination folder
        destination_folder = directory_path / extensions_map[file_ext]

        # Create the destination folder if it doesn't exist
        if not destination_folder.exists():
            destination_folder.mkdir()

        # Move the file
        shutil.move(str(file_path), str(destination_folder / file_path.name))
        print(f"Moved {file_path.name} to {extensions_map[file_ext]}")

# Example usage
if __name__ == "__main__":
    downloads_folder = str(Path.home() / "Downloads")
    organize_directory(downloads_folder)
    print("Organization complete!")
Cleaning Up Files by Age
This script identifies and optionally deletes files older than a specified number of days:
from datetime import datetime, timedelta
from pathlib import Path

def cleanup_old_files(directory, days_old, delete=False):
    """Find files older than days_old and optionally delete them."""
    # Calculate the cutoff date
    cutoff_date = datetime.now() - timedelta(days=days_old)
    cutoff_timestamp = cutoff_date.timestamp()

    directory_path = Path(directory)
    old_files = []

    # Walk through the directory
    for file_path in directory_path.rglob('*'):
        if file_path.is_file():
            # Get the file's last modification time
            mod_time = file_path.stat().st_mtime

            # Check if file is older than cutoff
            if mod_time < cutoff_timestamp:
                old_files.append(file_path)

                if delete:
                    try:
                        file_path.unlink()
                        print(f"Deleted: {file_path}")
                    except Exception as e:
                        print(f"Error deleting {file_path}: {e}")
                else:
                    modified_time = datetime.fromtimestamp(mod_time).strftime('%Y-%m-%d %H:%M:%S')
                    print(f"Old file: {file_path} (Modified: {modified_time})")

    if not delete:
        print(f"\nFound {len(old_files)} files older than {days_old} days.")
        print("Run with delete=True to remove these files.")
    else:
        print(f"\nDeleted {len(old_files)} files older than {days_old} days.")

# Example usage
if __name__ == "__main__":
    # List files older than 30 days in the temp directory
    temp_dir = str(Path.home() / "temp")
    cleanup_old_files(temp_dir, 30, delete=False)
These examples show how Python can save you hours of manual file organization and cleanup. With small tweaks, you can adapt these scripts to fit your own workflow; one such tweak is sketched below.
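For example, a useful adaptation is a dry-run mode that previews the moves before anything changes on disk. The sketch below is one way to add it; the dry_run parameter and the organize_directory_preview name are illustrative additions, not part of the script above.

import shutil
from pathlib import Path

def organize_directory_preview(directory, extensions_map, dry_run=True):
    """Like organize_directory above, but with an optional dry-run preview (illustrative sketch)."""
    directory_path = Path(directory)
    for file_path in directory_path.iterdir():
        if file_path.is_dir():
            continue
        folder_name = extensions_map.get(file_path.suffix.lower())
        if folder_name is None:
            continue
        destination = directory_path / folder_name / file_path.name
        if dry_run:
            # Report what would happen without touching any files
            print(f"Would move {file_path.name} to {folder_name}/")
        else:
            destination.parent.mkdir(exist_ok=True)
            shutil.move(str(file_path), str(destination))
            print(f"Moved {file_path.name} to {folder_name}/")

Running it once with dry_run=True is a cheap safety net before letting the script loose on a folder you care about.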
Web Scraping for Information
Another common task that can benefit from automation is gathering information from websites. Python libraries such as requests and BeautifulSoup make web scraping straightforward.
Monitoring Product Prices
This script checks the price of a product on an e-commerce website and sends an email notification if the price drops below a certain threshold:
import requests
from bs4 import BeautifulSoup
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import time
import random

def get_product_price(url, headers):
    """Scrape a product page and extract the price."""
    # Add random delay to avoid being blocked
    time.sleep(random.uniform(1, 3))

    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        print(f"Failed to fetch page: {response.status_code}")
        return None

    soup = BeautifulSoup(response.content, 'html.parser')

    # This selector will vary based on the website
    # Inspect the HTML of the page to find the correct selector
    price_element = soup.select_one('span.price')

    if price_element:
        # Extract the price and convert to float
        # Remove currency symbols and commas
        price_text = price_element.text.strip()
        price = float(''.join(c for c in price_text if c.isdigit() or c == '.'))
        return price

    return None

def send_price_alert(to_email, product_url, product_name, price):
    """Send an email alert about the price drop."""
    # Email configuration
    from_email = "[email protected]"  # Your email
    password = "your-app-password"     # Your app password

    # Create message
    msg = MIMEMultipart()
    msg['From'] = from_email
    msg['To'] = to_email
    msg['Subject'] = f"Price Alert: {product_name}"

    body = f"""
    Good news! The price of {product_name} has dropped to ${price:.2f}.

    Check it out here: {product_url}

    This is an automated message.
    """
    msg.attach(MIMEText(body, 'plain'))

    # Send email
    try:
        server = smtplib.SMTP('smtp.gmail.com', 587)
        server.starttls()
        server.login(from_email, password)
        text = msg.as_string()
        server.sendmail(from_email, to_email, text)
        server.quit()
        print("Email alert sent successfully!")
    except Exception as e:
        print(f"Failed to send email: {e}")

def monitor_product_price(product_url, product_name, target_price, check_interval=3600):
    """Monitor a product price and alert when it drops below target."""
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        'Accept-Language': 'en-US,en;q=0.9',
    }

    while True:
        current_price = get_product_price(product_url, headers)

        if current_price is None:
            print("Failed to get price. Retrying later...")
        elif current_price <= target_price:
            print(f"Price drop alert! {product_name} is now ${current_price:.2f}")
            send_price_alert("[email protected]", product_url, product_name, current_price)
            break
        else:
            print(f"Current price of {product_name}: ${current_price:.2f} (Target: ${target_price:.2f})")

        # Wait for the next check
        print(f"Waiting {check_interval // 60} minutes until next check...")
        time.sleep(check_interval)

# Example usage
if __name__ == "__main__":
    # Replace with the actual product URL
    url = "https://example.com/products/item123"
    name = "Wireless Headphones"
    target = 149.99

    # Check price every hour
    monitor_product_price(url, name, target, 3600)
Scraping News Headlines
This script fetches the latest news headlines from a website and saves them to a CSV file:
import requests
from bs4 import BeautifulSoup
import csv
from datetime import datetime

def scrape_news_headlines(url, num_headlines=10):
    """Scrape top headlines from a news website."""
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }

    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"Error fetching the URL: {e}")
        return []

    soup = BeautifulSoup(response.text, 'html.parser')

    # This will vary based on the website structure
    # Inspect the HTML to find the correct selectors
    headlines = []
    articles = soup.select('article.news-item')[:num_headlines]

    for article in articles:
        # Extract headline text and link
        headline_element = article.select_one('h2.headline')
        link_element = article.select_one('a')

        if headline_element and link_element:
            headline_text = headline_element.text.strip()
            headline_link = link_element.get('href')

            # Make sure link is absolute
            if headline_link.startswith('/'):
                headline_link = f"{url.rstrip('/')}{headline_link}"

            headlines.append({
                'title': headline_text,
                'url': headline_link,
                'date': datetime.now().strftime('%Y-%m-%d')
            })

    return headlines

def save_headlines_to_csv(headlines, filename):
    """Save headlines to a CSV file."""
    fieldnames = ['title', 'url', 'date']

    try:
        with open(filename, 'w', newline='', encoding='utf-8') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(headlines)
        print(f"Successfully saved {len(headlines)} headlines to {filename}")
    except Exception as e:
        print(f"Error saving to CSV: {e}")

# Example usage
if __name__ == "__main__":
    news_url = "https://example-news-site.com"
    headlines = scrape_news_headlines(news_url, 15)

    if headlines:
        csv_filename = f"headlines_{datetime.now().strftime('%Y%m%d')}.csv"
        save_headlines_to_csv(headlines, csv_filename)
    else:
        print("No headlines were found.")
Note: When scraping websites, always check the website's terms of service and robots.txt file to ensure you're allowed to scrape their content. Additionally, add delays between requests and use proper headers to avoid being blocked.
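If you want to check robots.txt from your script rather than by hand, the standard library's urllib.robotparser can do it. Here is a minimal sketch, using a placeholder site URL; note that robots.txt only expresses crawler rules, so the terms of service still need a human read.

from urllib.robotparser import RobotFileParser

def is_allowed_by_robots(base_url, path, user_agent="*"):
    """Return True if the site's robots.txt permits fetching the given path."""
    parser = RobotFileParser()
    parser.set_url(f"{base_url.rstrip('/')}/robots.txt")
    parser.read()  # Download and parse robots.txt
    return parser.can_fetch(user_agent, f"{base_url.rstrip('/')}{path}")

# Example usage (placeholder URL)
if __name__ == "__main__":
    if is_allowed_by_robots("https://example-news-site.com", "/news"):
        print("robots.txt allows this path")
    else:
        print("robots.txt disallows this path; don't scrape it")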
Email Automation
Email tasks can be repetitive and time-consuming. Python can automate sending emails, processing incoming messages, and generating reports.
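The examples in this section focus on sending mail. For the incoming side, the standard library's imaplib can read a mailbox; the sketch below simply prints the subjects of the most recent messages, with a placeholder server and credentials you would replace (an app password, as with the sending examples).

import imaplib
import email
from email.header import decode_header

def print_recent_subjects(count=5):
    """Print the subject lines of the most recent inbox messages (sketch)."""
    # Placeholder server and credentials -- replace with your own
    mail = imaplib.IMAP4_SSL("imap.gmail.com")
    mail.login("[email protected]", "your-app-password")
    mail.select("INBOX")

    # Get the IDs of all messages, then fetch only the last few
    _, data = mail.search(None, "ALL")
    for msg_id in data[0].split()[-count:]:
        _, msg_data = mail.fetch(msg_id, "(RFC822)")
        message = email.message_from_bytes(msg_data[0][1])
        subject, encoding = decode_header(message["Subject"])[0]
        if isinstance(subject, bytes):
            subject = subject.decode(encoding or "utf-8")
        print(subject)

    mail.logout()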
Sending Scheduled Reports
This script generates a weekly report and emails it to a list of recipients:
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
import pandas as pd
from datetime import datetime, timedelta
import os

def generate_weekly_report():
    """Generate a sample weekly report as a CSV file."""
    # This is a placeholder for your actual report generation logic
    # In a real scenario, you might pull data from a database

    # Create sample data
    data = {
        'Date': [(datetime.now() - timedelta(days=i)).strftime('%Y-%m-%d') for i in range(7)],
        'Sales': [1245, 1435, 1256, 1760, 1555, 1897, 2105],
        'Visitors': [345, 389, 412, 390, 423, 501, 489]
    }

    # Create DataFrame
    df = pd.DataFrame(data)

    # Save to CSV
    report_filename = f"weekly_report_{datetime.now().strftime('%Y%m%d')}.csv"
    df.to_csv(report_filename, index=False)

    return report_filename

def send_report_email(recipient_list, report_file):
    """Send the weekly report to a list of recipients."""
    # Email settings
    sender_email = "[email protected]"
    password = "your-app-password"

    # Current date info
    now = datetime.now()
    week_start = (now - timedelta(days=now.weekday() + 7)).strftime('%b %d')
    week_end = (now - timedelta(days=now.weekday() + 1)).strftime('%b %d, %Y')

    # Email content
    subject = f"Weekly Report: {week_start} - {week_end}"
    body = f"""
    Hello Team,

    Attached is the weekly report for {week_start} - {week_end}.

    Key highlights:
    - Overall sales increased by 12% compared to last week
    - Website traffic up by 8%
    - New product line performing well with 23% conversion rate

    Please review the attached report for detailed information.

    Best regards,
    The Automated Reporting System
    """

    # Create a message for each recipient
    for recipient in recipient_list:
        msg = MIMEMultipart()
        msg['From'] = sender_email
        msg['To'] = recipient
        msg['Subject'] = subject
        msg.attach(MIMEText(body, 'plain'))

        # Attach the report
        with open(report_file, 'rb') as file:
            attachment = MIMEApplication(file.read(), _subtype="csv")
            attachment.add_header('Content-Disposition', 'attachment', filename=report_file)
            msg.attach(attachment)

        # Send email
        try:
            server = smtplib.SMTP('smtp.gmail.com', 587)
            server.starttls()
            server.login(sender_email, password)
            server.send_message(msg)
            server.quit()
            print(f"Report sent successfully to {recipient}")
        except Exception as e:
            print(f"Failed to send email to {recipient}: {e}")

# Example usage
if __name__ == "__main__":
    recipients = ["[email protected]", "[email protected]", "[email protected]"]

    report_file = generate_weekly_report()
    send_report_email(recipients, report_file)

    # Clean up the file after sending
    os.remove(report_file)
    print("Weekly reporting completed!")