notifications

This commit is contained in:
Oli Passey
2025-07-01 14:10:43 +01:00
parent 888a45f59c
commit a05f45a71a
3 changed files with 134 additions and 11 deletions

View File

@@ -75,3 +75,4 @@ examples/
# Old files
*_old.py
*.bak
config.json

View File

@@ -1,5 +1,8 @@
# Use Python 3.11 slim image for smaller size
FROM python:3.11-slim
# Use Python 3.12 slim image for smaller size
FROM python:3.12-slim
# Install cron
RUN apt-get update && apt-get install -y cron && rm -rf /var/lib/apt/lists/*
# Set working directory
WORKDIR /app
@@ -37,13 +40,86 @@ RUN useradd --create-home --shell /bin/bash tracker && \
# Copy application code
COPY . .
# Create necessary directories
RUN mkdir -p /app/logs && \
mkdir -p /app/data && \
chown -R tracker:tracker /app
# Create the daily scraper script
RUN echo '#!/usr/bin/env python3\n\
import sys\n\
import os\n\
import asyncio\n\
import logging\n\
from datetime import datetime\n\
\n\
# Configure logging\n\
logging.basicConfig(\n\
level=logging.INFO,\n\
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",\n\
handlers=[\n\
logging.FileHandler("/var/log/price_scraper.log"),\n\
logging.StreamHandler()\n\
]\n\
)\n\
\n\
logger = logging.getLogger(__name__)\n\
\n\
async def main():\n\
try:\n\
from src.config import Config\n\
from src.database import DatabaseManager\n\
from src.scraper_manager import ScraperManager\n\
\n\
logger.info("Starting scheduled price scraping")\n\
\n\
config = Config()\n\
if config.has_config_error():\n\
logger.error(f"Configuration error: {config.get_config_error()}")\n\
return\n\
\n\
db_manager = DatabaseManager(config.database_path)\n\
scraper_manager = ScraperManager(config)\n\
\n\
products = db_manager.get_all_products()\n\
if not products:\n\
logger.warning("No products found to scrape")\n\
return\n\
\n\
logger.info(f"Scraping {len(products)} products")\n\
results = await scraper_manager.scrape_all_products(products)\n\
\n\
total = sum(len(sites) for sites in results.values())\n\
successful = sum(1 for sites in results.values() for result in sites.values() if result["success"])\n\
\n\
logger.info(f"Scraping complete: {successful}/{total} successful")\n\
\n\
# Save results to database\n\
for product_id, site_results in results.items():\n\
for site_name, result in site_results.items():\n\
if result["success"]:\n\
db_manager.save_price_history(\n\
product_id=product_id,\n\
site_name=site_name,\n\
price=result["price"],\n\
availability=result.get("availability", True),\n\
timestamp=datetime.now()\n\
)\n\
\n\
except Exception as e:\n\
logger.error(f"Scheduled scraping failed: {str(e)}", exc_info=True)\n\
\n\
if __name__ == "__main__":\n\
asyncio.run(main())\n\
' > /app/daily_scraper.py && chmod +x /app/daily_scraper.py
# Switch to non-root user
USER tracker
# Create cron job - runs daily at 8 AM
RUN echo "0 8 * * * cd /app && python daily_scraper.py >> /var/log/cron.log 2>&1" > /etc/cron.d/price-tracker
RUN chmod 0644 /etc/cron.d/price-tracker
RUN crontab /etc/cron.d/price-tracker
# Create startup script
RUN echo '#!/bin/bash\n\
# Start cron in background\n\
cron\n\
# Start web server in foreground\n\
exec python main.py --mode web\n\
' > /app/start.sh && chmod +x /app/start.sh
# Expose port
EXPOSE 5000
@@ -52,5 +128,5 @@ EXPOSE 5000
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:5000/ || exit 1
# Run the application
CMD ["python", "main.py"]
# Run startup script
CMD ["/app/start.sh"]

View File

@@ -53,7 +53,10 @@ class NotificationManager:
# Send email
server = smtplib.SMTP(email_config.get('smtp_server'), email_config.get('smtp_port'))
server.starttls()
server.login(email_config.get('sender_email'), email_config.get('sender_password'))
# Use SMTP credentials from config (may be different from sender email)
smtp_username = email_config.get('smtp_username') or email_config.get('sender_email')
smtp_password = email_config.get('smtp_password') or email_config.get('sender_password')
server.login(smtp_username, smtp_password)
text = msg.as_string()
server.sendmail(email_config.get('sender_email'),
@@ -190,3 +193,46 @@ class NotificationManager:
test_result['webhook']['error'] = str(e)
return test_result
def send_email(self, subject: str, message: str, html_message: str = None) -> bool:
    """Send a simple email notification (synchronous version).

    Args:
        subject: Subject line for the outgoing message.
        message: Plain-text body; attached only when non-empty.
        html_message: Optional HTML body attached as an alternative part.

    Returns:
        True when the message was handed off to the SMTP server, False when
        email notifications are disabled or sending failed for any reason.
    """
    email_config = self.notification_config.get('email', {})
    if not email_config.get('enabled', False):
        logger.warning("Email notifications are disabled")
        return False
    try:
        # Build a multipart/alternative message so mail clients can choose
        # between the plain-text and HTML renderings.
        msg = MIMEMultipart('alternative')
        msg['From'] = email_config.get('sender_email')
        msg['To'] = email_config.get('recipient_email')
        msg['Subject'] = subject
        if message:
            msg.attach(MIMEText(message, 'plain'))
        if html_message:
            msg.attach(MIMEText(html_message, 'html'))

        # Context manager guarantees the connection is closed even when
        # starttls/login/sendmail raises; the original only reached quit()
        # on the success path and leaked the socket on failure.
        with smtplib.SMTP(email_config.get('smtp_server'),
                          email_config.get('smtp_port')) as server:
            server.starttls()
            # SMTP credentials may differ from the sender address; fall back
            # to the sender fields when no explicit credentials are set.
            smtp_username = email_config.get('smtp_username') or email_config.get('sender_email')
            smtp_password = email_config.get('smtp_password') or email_config.get('sender_password')
            server.login(smtp_username, smtp_password)
            server.sendmail(email_config.get('sender_email'),
                            email_config.get('recipient_email'),
                            msg.as_string())
        logger.info(f"Email sent successfully: {subject}")
        return True
    except Exception as e:
        # Never propagate: a notification failure must not crash the caller.
        logger.error(f"Failed to send email: {e}")
        return False