view backend/sqlalchemy/FeedUpdater.py @ 166:04c3b9796b89

feedparser now uses the proxy if one is configured. To implement this, the FeedUpdater had to change a bit; the sqlalchemy backend is not yet refactored.
author dirk
date Sat, 03 Sep 2011 04:12:35 +0200
parents 86f828096aaf
children a3c945ce434c
line wrap: on
line source


from backend.AbstractFeedUpdater import AbstractFeedUpdater, FeedUpdateException
from datetime import datetime
from Feed import Feed
from FeedEntry import FeedEntry
import feedparser
import logging

log = logging.getLogger("FeedUpdater")

def updateAllFeeds(session):
    """Update every feed that is due, committing after each one.

    A failure in one feed is logged and does not abort the remaining
    feeds; the session is committed after every feed so earlier
    successful updates are persisted even if a later one fails.

    :param session: SQLAlchemy session used for querying and persisting
    """
    allFeeds = findFeedsToUpdate(session)
    for feed in allFeeds:
        try:
            FeedUpdater(session, feed).update()
        # "except X as e" works on Python 2.6+ and 3.x; the old
        # "except X, e" form is a syntax error on Python 3
        except FeedUpdateException as fue:
            # keep going: one broken feed must not block the others
            log.warn("problems while updating feed %s: %s", feed.rss_url, fue)
        session.commit()

def findFeedsToUpdate(session):
    """Return a query yielding all feeds whose next update is already due."""
    now = datetime.now()
    return session.query(Feed).filter(Feed.next_update < now)

def createNewFeed(url, session):
    """Fetch *url*, create a Feed row for it and run a first update.

    Falls back to the URL itself as the title when the parsed feed
    carries no title element.

    :param url: the RSS/Atom URL to subscribe to
    :param session: SQLAlchemy session the new Feed is added to
    :return: the newly created Feed (not committed)
    """
    # when updating to python3 see http://code.google.com/p/feedparser/issues/detail?id=260
    result = feedparser.parse(url)
    # BUG FIX: the title lives in result["feed"], not on the top-level
    # result dict, so the old result.has_key("title") check never matched
    # the value actually read (result["feed"].title). has_key() is also
    # deprecated; FeedParserDict supports dict-style .get().
    title = result["feed"].get("title", url)
    newFeed = Feed(title, url)
    session.add(newFeed)

    # reuse the already-parsed result instead of fetching the feed twice
    FeedUpdater(session, newFeed).update(result)
    return newFeed


class FeedUpdater(AbstractFeedUpdater):
    """Feed updater backed by an SQLAlchemy session.

    New entries discovered during an update are persisted through the
    session; entries whose id is already stored are skipped.
    """

    def __init__(self, session, feed):
        AbstractFeedUpdater.__init__(self, feed)
        self.session = session

    def _processEntry(self, entry):
        # store only entries we have never seen before
        known = FeedEntry.findById(entry.id, self.session)
        if known is None:
            self._createFeedEntry(entry)

    def _createFeedEntry(self, entry):
        feedEntry = FeedEntry.create(entry)
        feedEntry.feed = self.feed
        self.session.add(feedEntry)
        log.info("new feed entry: " + entry.title)

    def _incrementFeedUpdateDate(self):
        # delegate to the feed so the scheduling policy lives in one place
        self.feed.incrementNextUpdateDate()