diff --git a/src/auth_google.py b/src/auth_google.py
index 62fd04b..a8d9867 100644
--- a/src/auth_google.py
+++ b/src/auth_google.py
@@ -8,6 +8,9 @@ from google.auth.credentials import Credentials
 from google.auth.transport.requests import Request
 from google_auth_oauthlib.flow import InstalledAppFlow
 from googleapiclient.discovery import Resource, build
+from logger import setup_logger
+
+logger = setup_logger(__name__)
 
 SCOPES: List[str] = ["https://www.googleapis.com/auth/calendar"]
 
@@ -23,16 +26,20 @@ def authenticate_google() -> Resource:
     """
     creds: Credentials | None = None
     if os.path.exists("token.pickle"):
+        logger.debug("Loading existing credentials from token.pickle")
         with open("token.pickle", "rb") as token:
             creds = pickle.load(token)
 
     if not creds or not creds.valid:
         if creds and creds.expired and creds.refresh_token:
+            logger.info("Refreshing expired credentials")
            creds.refresh(Request())
         else:
+            logger.info("Starting new OAuth2 flow")
            flow = InstalledAppFlow.from_client_secrets_file("credentials.json", SCOPES)
            creds = flow.run_local_server(port=0)
 
+        logger.debug("Saving credentials to token.pickle")
         with open("token.pickle", "wb") as token:
             pickle.dump(creds, token)
 
@@ -52,11 +59,16 @@ def search_calendar_id(service: Resource, calendar_name: str) -> str:
     Raises:
         ValueError: If the target calendar is not found.
     """
+    logger.info(f"Searching for calendar: {calendar_name}")
     calendars_result = service.calendarList().list().execute()
     calendars = calendars_result.get("items", [])
+    logger.debug(f"Found {len(calendars)} calendars in total")
 
     for calendar in calendars:
         if calendar["summary"].lower() == calendar_name.lower():
+            logger.info(f"Found matching calendar with ID: {calendar['id']}")
             return calendar["id"]
 
-    raise ValueError(f"No calendar named '{calendar_name}' found.")
+    error_msg = f"No calendar named '{calendar_name}' found"
+    logger.error(error_msg)
+    raise ValueError(error_msg)
diff --git a/src/caldav_client.py b/src/caldav_client.py
index c0451c7..7739780 100644
--- a/src/caldav_client.py
+++ b/src/caldav_client.py
@@ -5,6 +5,9 @@ from typing import Any, Dict
 from caldav import Calendar as CalDAVCalendar
 from caldav import DAVClient, Principal
 from icalendar import Calendar
+from logger import setup_logger
+
+logger = setup_logger(__name__)
 
 EventDict = Dict[str, Any]
 EventsDict = Dict[str, EventDict]
@@ -21,8 +24,11 @@ def connect_to_caldav(url: str, username: str, password: str) -> Principal:
     Returns:
         Principal: The authenticated CalDAV principal object.
     """
+    logger.info(f"Connecting to CalDAV server at {url}")
     client = DAVClient(url, username=username, password=password)
-    return client.principal()
+    principal = client.principal()
+    logger.info("Successfully connected to CalDAV server")
+    return principal
 
 
 def get_calendar(principal: Principal, calendar_name: str) -> CalDAVCalendar:
@@ -38,15 +44,22 @@ def get_calendar(principal: Principal, calendar_name: str) -> CalDAVCalendar:
     Raises:
         ValueError: If no matching calendar is found.
""" + logger.info("Retrieving list of calendars") calendars = principal.calendars() if not calendars: - raise ValueError("No calendars found on the server.") + error_msg = "No calendars found on the server" + logger.error(error_msg) + raise ValueError(error_msg) + logger.debug(f"Found {len(calendars)} calendars") for cal in calendars: if cal.name.lower() == calendar_name.lower(): + logger.info(f"Found matching calendar: {cal.name}") return cal - raise ValueError(f"No calendar named '{calendar_name}' found.") + error_msg = f"No calendar named '{calendar_name}' found" + logger.error(error_msg) + raise ValueError(error_msg) def _process_exdate(exdate: Any) -> list: @@ -87,11 +100,16 @@ def fetch_events(calendar: CalDAVCalendar) -> EventsDict: Returns: EventsDict: Dictionary of events indexed by their UIDs. """ + logger.info(f"Fetching events from calendar: {calendar.name}") events: EventsDict = {} + event_count = 0 + recurrence_count = 0 + for event in calendar.events(): ical = Calendar.from_ical(event.data) for component in ical.walk(): if component.name == "VEVENT": + event_count += 1 uid = str(component.get("UID")) dtstart = component.get("DTSTART") dtend = component.get("DTEND") @@ -102,6 +120,7 @@ def fetch_events(calendar: CalDAVCalendar) -> EventsDict: recurrence_id = component.get("RECURRENCE-ID") if recurrence_id: uid = f"{uid}-{recurrence_id.dt.isoformat()}" + recurrence_count += 1 description = component.get("DESCRIPTION") description = str(description) if description else "" @@ -123,4 +142,5 @@ def fetch_events(calendar: CalDAVCalendar) -> EventsDict: "google_event_id": None, } + logger.info(f"Retrieved {event_count} events ({recurrence_count} recurring instances) from CalDAV calendar") return events diff --git a/src/logger.py b/src/logger.py new file mode 100644 index 0000000..4a37735 --- /dev/null +++ b/src/logger.py @@ -0,0 +1,30 @@ +"""Module for setting up logging configuration.""" + +import logging + +# Configure logging format +LOGFORMAT = "%(asctime)s - %(levelname)s - %(message)s" +DATEFORMAT = "%Y-%m-%d %H:%M:%S" + + +def setup_logger(name: str) -> logging.Logger: + """Set up and return a logger instance with console output. 
+
+    Args:
+        name: Name of the logger, typically __name__ from the calling module
+
+    Returns:
+        logging.Logger: Configured logger instance
+    """
+    logger = logging.getLogger(name)
+
+    if not logger.handlers:
+        logger.setLevel(logging.INFO)
+
+        formatter = logging.Formatter(LOGFORMAT, DATEFORMAT)
+
+        console_handler = logging.StreamHandler()
+        console_handler.setFormatter(formatter)
+        logger.addHandler(console_handler)
+
+    return logger
diff --git a/src/main.py b/src/main.py
index fc43df1..1f45e72 100644
--- a/src/main.py
+++ b/src/main.py
@@ -5,6 +5,7 @@ import os
 from auth_google import authenticate_google, search_calendar_id
 from caldav_client import connect_to_caldav, fetch_events, get_calendar
 from dotenv import load_dotenv
+from logger import setup_logger
 from sync_logic import (
     add_event_to_google,
     compare_events,
@@ -14,6 +15,8 @@ from sync_logic import (
     save_local_sync,
 )
 
+logger = setup_logger(__name__)
+
 load_dotenv()
 
 LOCAL_SYNC_FILE = "calendar_sync.json"
@@ -27,43 +30,50 @@ GOOGLE_CALENDAR_NAME = os.getenv("GOOGLE_CALENDAR_NAME")
 def main() -> None:
     """Run the calendar synchronization process."""
     try:
-        print("Authenticating with Google Calendar...")
+        logger.info("Starting calendar synchronization process")
+
+        logger.info("Authenticating with Google Calendar...")
         service = authenticate_google()
         google_calendar_id = search_calendar_id(service, GOOGLE_CALENDAR_NAME)
+        logger.info("Successfully authenticated with Google Calendar")
 
-        print("Connecting to CalDAV server...")
+        logger.info("Connecting to CalDAV server...")
         principal = connect_to_caldav(CALDAV_URL, CALDAV_USERNAME, CALDAV_PASSWORD)
         caldav_calendar = get_calendar(principal, CALDAV_CALENDAR_NAME)
+        logger.info("Successfully connected to CalDAV server")
 
-        print(f"Fetching events from CalDAV calendar: {caldav_calendar.name}")
+        logger.info(f"Fetching events from CalDAV calendar: {caldav_calendar.name}")
         server_events = fetch_events(caldav_calendar)
+        logger.info(f"Retrieved {len(server_events)} events from CalDAV")
 
-        print("Loading local sync data...")
+        logger.info("Loading local sync data...")
         local_events = load_local_sync(LOCAL_SYNC_FILE)
+        logger.info(f"Loaded {len(local_events)} events from local sync file")
 
-        print("Comparing events...")
+        logger.info("Comparing events...")
         new_events, updated_events, deleted_events = compare_events(local_events, server_events)
 
-        print(f"Adding/Updating {len(new_events) + len(updated_events)} events to Google Calendar...")
+        logger.info(f"Adding {len(new_events)} new events and updating {len(updated_events)} events in Google Calendar")
         for event in new_events + updated_events:
             add_event_to_google(service, event, google_calendar_id)
 
-        print(f"Deleting {len(deleted_events)} events from Google Calendar...")
+        logger.info(f"Deleting {len(deleted_events)} events from Google Calendar")
         for event in deleted_events:
             delete_event_from_google(service, event, google_calendar_id)
 
-        print("Saving updated sync data...")
+        logger.info("Saving updated sync data...")
         save_local_sync(LOCAL_SYNC_FILE, server_events)
 
-        print("Sync process completed successfully.")
+        logger.info("Sync process completed successfully")
 
         if error_events:
-            print("The following events encountered errors during sync:")
+            logger.warning("The following events encountered errors during sync:")
             for event in error_events:
-                print(event)
+                logger.warning(f"Failed event: {event['summary']} (UID: {event['uid']})")
 
     except Exception as e:
-        print(f"Error occurred during sync: {e}")
+        logger.error(f"Error occurred during sync: {str(e)}", exc_info=True)
{str(e)}", exc_info=True) + raise if __name__ == "__main__": diff --git a/src/sync_logic.py b/src/sync_logic.py index bdbeac8..ed9d0ce 100644 --- a/src/sync_logic.py +++ b/src/sync_logic.py @@ -7,11 +7,14 @@ from datetime import datetime from typing import Any, Dict, List, Tuple from googleapiclient.discovery import Resource +from logger import setup_logger + +logger = setup_logger(__name__) EventDict = Dict[str, Any] EventsDict = Dict[str, EventDict] -error_events = [] +error_events: List[EventDict] = [] def _sanitize_event_for_json(event_data: Dict[str, Any]) -> Dict[str, Any]: @@ -22,9 +25,6 @@ def _sanitize_event_for_json(event_data: Dict[str, Any]) -> Dict[str, Any]: Returns: Dict[str, Any]: Sanitized dictionary. - - Raises: - TypeError: If the event data is not JSON serializable. """ sanitized = event_data.copy() @@ -38,53 +38,6 @@ def _sanitize_event_for_json(event_data: Dict[str, Any]) -> Dict[str, Any]: return sanitized -def load_local_sync(file_path: str) -> EventsDict: - """Load the locally synced events from a JSON file. - - Args: - file_path: Path to the JSON file. - - Returns: - EventsDict: Dictionary of previously synced events. - """ - if not os.path.exists(file_path): - return {} - with open(file_path, "r") as file: - return json.load(file) - - -def save_local_sync(file_path: str, events: EventsDict) -> None: - """Save the events to the local sync JSON file. - - Args: - file_path: Path to the JSON file. - events: Dictionary of events to save. - """ - sanitized_events = {} - for event_id, event_data in events.items(): - try: - sanitized_events[event_id] = _sanitize_event_for_json(event_data) - except Exception as e: - print(f"Warning: Could not sanitize event {event_id}: {e}") - continue - - try: - with open(file_path, "w") as file: - json.dump(sanitized_events, file, indent=4) - except TypeError as e: - print(f"Error saving events: {e}") - for event_id, event_data in sanitized_events.items(): - try: - json.dumps(event_data) - except TypeError as e: - print(f"Problem found in event {event_id}: {e}") - for key, value in event_data.items(): - try: - json.dumps({key: value}) - except TypeError as e: - print(f" Problem field: {key} = {value} (type: {type(value)})") - - def compare_events( local_events: EventsDict, server_events: EventsDict, @@ -102,20 +55,101 @@ def compare_events( updated_events: List[EventDict] = [] deleted_events: List[EventDict] = [] + logger.info(f"Comparing {len(server_events)} server events with {len(local_events)} local events") + + # Find new and updated events for uid, event in server_events.items(): if uid not in local_events: + logger.debug(f"New event found: {event['summary']} (UID: {uid})") new_events.append(event) elif event["last_modified"] != local_events[uid].get("last_modified"): + logger.debug(f"Modified event found: {event['summary']} (UID: {uid})") event["google_event_id"] = local_events[uid].get("google_event_id") updated_events.append(event) - for uid in local_events: + # Find deleted events + for uid, event in local_events.items(): if uid not in server_events: - deleted_events.append(local_events[uid]) + logger.debug(f"Deleted event found: {event['summary']} (UID: {uid})") + deleted_events.append(event) + logger.info( + f"Found {len(new_events)} new events, {len(updated_events)} modified events, " + f"and {len(deleted_events)} deleted events", + ) return new_events, updated_events, deleted_events +def load_local_sync(file_path: str) -> EventsDict: + """Load the locally synced events from a JSON file. 
+
+    Args:
+        file_path: Path to the JSON file.
+
+    Returns:
+        EventsDict: Dictionary of previously synced events.
+    """
+    logger.info(f"Loading local sync data from {file_path}")
+    if not os.path.exists(file_path):
+        logger.info("No existing sync file found, starting fresh")
+        return {}
+
+    try:
+        with open(file_path, "r") as file:
+            events = json.load(file)
+        logger.info(f"Successfully loaded {len(events)} events from local sync file")
+        return events
+    except json.JSONDecodeError as e:
+        logger.error(f"Error decoding JSON from {file_path}: {str(e)}")
+        return {}
+    except Exception as e:
+        logger.error(f"Unexpected error loading sync file: {str(e)}")
+        return {}
+
+
+def save_local_sync(file_path: str, events: EventsDict) -> None:
+    """Save the events to the local sync JSON file.
+
+    Args:
+        file_path: Path to the JSON file.
+        events: Dictionary of events to save.
+    """
+    logger.info(f"Saving {len(events)} events to local sync file")
+    sanitized_events = {}
+
+    # Sanitize events
+    for event_id, event_data in events.items():
+        try:
+            sanitized_events[event_id] = _sanitize_event_for_json(event_data)
+        except Exception as e:
+            logger.error(f"Failed to sanitize event {event_id} ({event_data.get('summary', 'No summary')}): {str(e)}")
+            continue
+
+    # Save to file
+    try:
+        with open(file_path, "w") as file:
+            json.dump(sanitized_events, file, indent=4)
+        logger.info(f"Successfully saved {len(sanitized_events)} events to {file_path}")
+    except Exception as e:
+        logger.error(f"Failed to save sync file: {str(e)}")
+        logger.debug("Attempting to identify problematic events...")
+
+        for event_id, event_data in sanitized_events.items():
+            try:
+                json.dumps(event_data)
+            except TypeError as e:
+                logger.error(f"JSON serialization failed for event: {event_id}")
+                logger.error(f"Event summary: {event_data.get('summary', 'No summary')}")
+                logger.error(f"Error: {str(e)}")
+
+                # Debug field values
+                for key, value in event_data.items():
+                    try:
+                        json.dumps({key: value})
+                    except TypeError:
+                        logger.error(f"Problematic field: {key} = {value} (type: {type(value)})")
+
+
 def add_event_to_google(service: Resource, event: EventDict, calendar_id: str) -> None:
     """Add a single event to Google Calendar.
 
@@ -124,8 +158,10 @@ def add_event_to_google(service: Resource, event: EventDict, calendar_id: str) -> None:
         event: Dictionary containing event details.
         calendar_id: ID of the target Google Calendar.
""" + logger.info(f"Processing event: {event['summary']} (UID: {event['uid']})") + try: - print(f"Adding event to Google Calendar: {event['summary']}") + # Prepare event data google_event = { "summary": event["summary"], "description": event.get("description", ""), @@ -134,7 +170,9 @@ def add_event_to_google(service: Resource, event: EventDict, calendar_id: str) - "end": {"dateTime": event["end"], "timeZone": "UTC"}, } + # Handle recurring events if event.get("rrule"): + logger.debug(f"Processing recurring event rules for {event['summary']}") rrule_parts = [] for key, value in event["rrule"].items(): if isinstance(value, list): @@ -143,10 +181,13 @@ def add_event_to_google(service: Resource, event: EventDict, calendar_id: str) - rrule_parts.append(f"{key}={value}") google_event["recurrence"] = [f"RRULE:{';'.join(rrule_parts)}"] + # Handle excluded dates if event.get("exdate"): + logger.debug(f"Processing {len(event['exdate'])} excluded dates") exdates = [f"EXDATE;TZID=UTC:{date}" for date in event["exdate"]] google_event["recurrence"].extend(exdates) + # Add event to Google Calendar created_event = ( service.events() .insert( @@ -157,10 +198,14 @@ def add_event_to_google(service: Resource, event: EventDict, calendar_id: str) - ) event["google_event_id"] = created_event["id"] - print(f"Event created: {created_event.get('htmlLink')}") + logger.info(f"Successfully created event: {event['summary']} (Google ID: {created_event['id']})") + + # Rate limiting time.sleep(0.5) + except Exception as e: - print(f"Failed to add event: {event['summary']} - {e}") + logger.error(f"Failed to add event {event['summary']} (UID: {event['uid']})") + logger.error(f"Error: {str(e)}") error_events.append(event) @@ -174,9 +219,18 @@ def delete_event_from_google(service: Resource, event: EventDict, calendar_id: s """ google_event_id = event.get("google_event_id") if not google_event_id: - raise ValueError(f"Cannot delete event {event['summary']}: missing Google Calendar ID") + error_msg = f"Cannot delete event {event['summary']} (UID: {event['uid']}): missing Google Calendar ID" + logger.error(error_msg) + raise ValueError(error_msg) + try: + logger.info(f"Deleting event: {event['summary']} (Google ID: {google_event_id})") service.events().delete(calendarId=calendar_id, eventId=google_event_id).execute() - print(f"Deleted event: {event['summary']}") + logger.info(f"Successfully deleted event: {event['summary']}") + + # Rate limiting + time.sleep(0.5) + except Exception as e: - print(f"Failed to delete event: {event['summary']} - {e}") + logger.error(f"Failed to delete event: {event['summary']} (UID: {event['uid']})") + logger.error(f"Error: {str(e)}")