# HG changeset patch
# User Dirk Olmes
# Date 1314019833 -7200
# Node ID 1e6274cca0356c2b77be0fe5c43a4988a6859f7d
# Parent dc0a82841a60ea77aa87fa9bb54654d707105013
# Parent 5bb58c956e480bb2faabce05684cfe4b0ccf6733
merge with "backend" branch

diff -r dc0a82841a60 -r 1e6274cca035 AddFeed.py
--- a/AddFeed.py	Sun Aug 21 02:44:13 2011 +0200
+++ b/AddFeed.py	Mon Aug 22 15:30:33 2011 +0200
@@ -1,36 +1,27 @@
 
 from PyQt4 import QtGui
 from Ui_AddFeed import Ui_AddFeed
-import FeedUpdater
 import logging
 
 class AddFeed(QtGui.QDialog):
-    def __init__(self, session):
+    def __init__(self, backend=None):
         QtGui.QWidget.__init__(self, None)
-        self.session = session
+        self.backend = backend
         self.exception = None
         self.ui = Ui_AddFeed()
         self.ui.setupUi(self)
         self.ui.url.setFocus()
-    
+
     def accept(self):
         try:
-            self.createFeed()
-            self.session.commit()
+            url = self.getUrl()
+            self.backend.createFeed(url)
         except AttributeError as ae:
             logging.getLogger("AddFeed").info(ae)
-            
-            self.exception = ae
-            self.session.rollback()
+            self.exception = ae
         QtGui.QDialog.accept(self)
 
-    def createFeed(self):
-        url = self.getUrl()
-        # TODO get status from feedUpdater and display in status area of the main window
-        FeedUpdater.createNewFeed(url, self.session)
-
     def getUrl(self):
         text = self.ui.url.text()
         # the content of a QLineEdit is a QString, convert it to a Python string
         return str(text)
-        
\ No newline at end of file
diff -r dc0a82841a60 -r 1e6274cca035 Database.py
--- a/Database.py	Sun Aug 21 02:44:13 2011 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-
-import Mapping
-import sqlalchemy
-import sqlalchemy.orm
-import sys
-import util
-
-# Keep the connection to the database only once. The feed updater and the GUI app will
-# operate on a single engine/session but this comes in handy for interactive use
-engine = None
-SessionMaker = None
-
-def createSession(databaseUrl=None):
-    if databaseUrl is None:
-        databaseUrl = _getDatabaseUrl()
-    initEngine(databaseUrl)
-    Mapping.createMapping(engine)
-    initSessionMaker()
-    return SessionMaker()
-
-def _getDatabaseUrl():
-    if len(sys.argv) < 2:
-        print("Usage: %s " % (sys.argv[0]))
-        sys.exit(1)
-    return sys.argv[1]
-
-def initEngine(databaseUrl):
-    global engine
-    if engine is None:
-        verbose = util.databaseLoggingEnabled()
-        engine = sqlalchemy.create_engine(databaseUrl, echo=verbose)
-
-def initSessionMaker():
-    global SessionMaker
-    if SessionMaker is None:
-        SessionMaker = sqlalchemy.orm.sessionmaker(bind=engine)
-    
\ No newline at end of file
diff -r dc0a82841a60 -r 1e6274cca035 DisplayModel.py
--- a/DisplayModel.py	Sun Aug 21 02:44:13 2011 +0200
+++ b/DisplayModel.py	Mon Aug 22 15:30:33 2011 +0200
@@ -2,19 +2,25 @@
 from PyQt4.QtCore import QAbstractListModel, QModelIndex, QVariant, Qt
 
 class DisplayModel(QAbstractListModel):
-    def __init__(self, parent=None, list=None, displayFunction=None, **args):
+    def __init__(self, parent=None, list=None, displayAttribute=None, **args):
         QAbstractListModel.__init__(self, parent, *args)
         self.list = list
-        self.displayFunction = displayFunction
-        
+        self.displayAttribute = displayAttribute
+
     def rowCount(self, parent=QModelIndex()):
         return len(self.list)
-    
-    def data(self, index, role): 
+
+    def data(self, index, role):
         if index.isValid() and role == Qt.DisplayRole:
             row = index.row()
             object = self.list[row]
-            displayString = self.displayFunction(object)
+            displayString = self._stringToDisplay(object)
             return QVariant(displayString)
-        else: 
+        else:
             return QVariant()
+
+    def _stringToDisplay(self, object):
+        if hasattr(object, 
self.displayAttribute): + return getattr(object, self.displayAttribute) + else: + return "invalid display attribute: " + self.displayAttribute diff -r dc0a82841a60 -r 1e6274cca035 Feed.py --- a/Feed.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,46 +0,0 @@ - -from datetime import datetime, timedelta -import FeedEntry - -class Feed(object): - @staticmethod - def all(session): - return session.query(Feed).order_by(Feed.title).all() - - def __init__(self, title, rss_url): - self.title = title - self.rss_url = rss_url - # default: update every 60 minutes - self.update_interval = 60 - self.incrementNextUpdateDate() - self.auto_load_entry_link = False - self.always_open_in_browser = False - - def __repr__(self): - return "" % (self.pk, self.title) - - def userPresentableString(self): - return self.title - - def entriesSortedByUpdateDate(self, hideReadEntries=False): - if hideReadEntries: - sortedEntries = self._unreadEntries() - else: - sortedEntries = list(self.entries) - sortedEntries.sort(FeedEntry.compareByUpdateDate) - return sortedEntries - - def _unreadEntries(self): - retValue = [] - for entry in self.entries: - if not entry.read: - retValue.append(entry) - return retValue - - def incrementNextUpdateDate(self): - delta = timedelta(minutes=self.update_interval) - self.next_update = datetime.now() + delta - - def markAllEntriesRead(self): - for entry in self.entries: - entry.markRead() \ No newline at end of file diff -r dc0a82841a60 -r 1e6274cca035 FeedEntry.py --- a/FeedEntry.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,43 +0,0 @@ - -from datetime import datetime - -def compareByUpdateDate(first, second): - return cmp(first.updated, second.updated) - -class FeedEntry(object): - @staticmethod - def findById(id, session): - result = session.query(FeedEntry).filter(FeedEntry.id == id) - return result.first() - - @staticmethod - def create(entry): - new = FeedEntry() - new.id = entry.id - new.link = entry.link - new.title = entry.title - new.updated = entry.updated_parsed - new.summary = entry.summary - return new - - def __init__(self): - self.create_timestamp = datetime.now() - self.read = 0 - - def __repr__(self): - return "" % (self.pk, self.title) - - def userPresentableString(self): - return self.title - - def toggleRead(self): - if self.read: - self.markUnread() - else: - self.markRead() - - def markRead(self): - self.read = 1 - - def markUnread(self): - self.read = 0 diff -r dc0a82841a60 -r 1e6274cca035 FeedList.py --- a/FeedList.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,23 +0,0 @@ - -from Feed import Feed -from FeedEntry import FeedEntry -from Preferences import Preferences -from sqlalchemy.orm import joinedload - -def getFeeds(session): - preferences = Preferences(session) - if preferences.showOnlyUnreadFeeds(): - return _getUnreadFeeds(session) - else: - return Feed.all(session) - -def _getUnreadFeeds(session): - query = session.query(FeedEntry).filter(FeedEntry.read == 0) - queryWithOptions = query.options(joinedload("feed")) - result = queryWithOptions.all() - return _collectFeeds(result) - -def _collectFeeds(feedEntries): - feeds = [entry.feed for entry in feedEntries] - uniqueFeeds = set(feeds) - return list(uniqueFeeds) diff -r dc0a82841a60 -r 1e6274cca035 FeedSettings.py --- a/FeedSettings.py Sun Aug 21 02:44:13 2011 +0200 +++ b/FeedSettings.py Mon Aug 22 15:30:33 2011 +0200 @@ -3,10 +3,15 @@ from Ui_FeedSettings import Ui_FeedSettings class 
FeedSettings(QtGui.QDialog): - def __init__(self, session, feed): + """ + Copy all feed properties into the GUI on initialization. Collect all changes + in a separate dict that's passed into the backend along with the feed to modify. + """ + def __init__(self, feed, backend): QtGui.QWidget.__init__(self, None) - self.session = session self.feed = feed + self.backend = backend + self.changes = {} self.ui = Ui_FeedSettings() self.ui.setupUi(self) self.initUi() @@ -19,29 +24,24 @@ def editingTitleFinished(self): title = str(self.ui.feedTitle.text()) - self.feed.title = title + self.changes["title"] = title def editingUpdateIntervalFinished(self): updateInterval = int(str(self.ui.updateInterval.text())) - self.feed.update_interval = updateInterval - self.feed.incrementNextUpdateDate() + self.changes["update_interval"] = updateInterval def autoLoadArticleChanged(self, change): if change: - self.feed.auto_load_entry_link = True + self.changes["auto_load_entry_link"] = True else: - self.feed.auto_load_entry_link = False + self.changes["auto_load_entry_link"] = False def alwaysOpenInExternalBrowser(self, change): if change: - self.feed.always_open_in_browser = True + self.changes["always_open_in_browser"] = True else: - self.feed.always_open_in_browser = False + self.changes["always_open_in_browser"] = False def accept(self): - self.session.commit() + self.backend.updateFeed(self.feed, self.changes) QtGui.QDialog.accept(self) - - def reject(self): - self.session.rollback() - QtGui.QDialog.reject(self) diff -r dc0a82841a60 -r 1e6274cca035 FeedUpdater.py --- a/FeedUpdater.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,83 +0,0 @@ - -from datetime import datetime -from Feed import Feed -from FeedEntry import FeedEntry -import feedparser -import logging - -STATUS_ERROR = 400 -log = logging.getLogger("FeedUpdater") - -def updateAllFeeds(session): - allFeeds = findFeedsToUpdate(session) - for feed in allFeeds: - try: - FeedUpdater(session, feed).update() - except FeedUpdateException, fue: - log.warn("problems while updating feed " + feed.rss_url + ": " + str(fue)) - session.commit() - -def findFeedsToUpdate(session): - return session.query(Feed).filter(Feed.next_update < datetime.now()) - -def createNewFeed(url, session): - # when updating to python3 see http://code.google.com/p/feedparser/issues/detail?id=260 - result = feedparser.parse(url) - if result.has_key("title"): - title = result["feed"].title - else: - title = url - newFeed = Feed(title, url) - session.add(newFeed) - - FeedUpdater(session, newFeed).update() - -def normalize(entry): - if not hasattr(entry, "id"): - entry.id = entry.link - if not hasattr(entry, "updated_parsed"): - entry.updated_parsed = datetime.today() - else: - entry.updated_parsed = datetime(*entry.updated_parsed[:6]) - if not hasattr(entry, "summary"): - if hasattr(entry, "content"): - entry.summary = entry.content[0].value - else: - entry.summary = "" - -class FeedUpdater(object): - def __init__(self, session, feed): - self.session = session - self.feed = feed - - def update(self): - log.info("updating " + self.feed.rss_url) - result = self.getFeed() - for entry in result.entries: - self.processEntry(entry) - self.feed.incrementNextUpdateDate() - - def getFeed(self): - result = feedparser.parse(self.feed.rss_url) - # bozo flags if a feed is well-formed. 
-# if result["bozo"] > 0: -# raise FeedUpdateException() - status = result["status"] - if status >= STATUS_ERROR: - raise FeedUpdateException("HTTP status " + str(status)) - return result - - def processEntry(self, entry): - normalize(entry) - feedEntry = FeedEntry.findById(entry.id, self.session) - if feedEntry is None: - self.createFeedEntry(entry) - - def createFeedEntry(self, entry): - new = FeedEntry.create(entry) - new.feed = self.feed - self.session.add(new) - log.info("new feed entry: " + entry.title) - -class FeedUpdateException(Exception): - pass diff -r dc0a82841a60 -r 1e6274cca035 MainWindow.py --- a/MainWindow.py Sun Aug 21 02:44:13 2011 +0200 +++ b/MainWindow.py Mon Aug 22 15:30:33 2011 +0200 @@ -1,35 +1,32 @@ from AddFeed import AddFeed from DisplayModel import DisplayModel -from Feed import Feed from FeedEntryItemDelegate import FeedEntryItemDelegate from FeedEntryTableModel import FeedEntryTableModel from FeedSettings import FeedSettings -from Preferences import Preferences from PreferencesDialog import PreferencesDialog from PyQt4.QtCore import QUrl from PyQt4.QtGui import QApplication from PyQt4.QtGui import QMainWindow from PyQt4.QtGui import QWidget from Ui_MainWindow import Ui_MainWindow -import FeedList import subprocess STATUS_MESSAGE_DISPLAY_MILLIS = 20000 class MainWindow(QMainWindow): - def __init__(self, session=None): + def __init__(self, backend=None): QWidget.__init__(self, None) - self.session = session - self.preferences = Preferences(session) + self.backend = backend + self.preferences = backend.preferences() self.ui = Ui_MainWindow() self.ui.setupUi(self) self.updateFeedList() self.initFeedEntryList() def updateFeedList(self): - self.allFeeds = FeedList.getFeeds(self.session) - feedModel = DisplayModel(self, self.allFeeds, Feed.userPresentableString) + self.allFeeds = self.backend.getFeeds() + feedModel = DisplayModel(self, self.allFeeds, "title") self.ui.feedList.setModel(feedModel) self.ui.feedList.update() @@ -120,16 +117,15 @@ subprocess.Popen([browser, self.selectedEntry.link]) def toggleReadOnSelectedEntry(self): - self.selectedEntry.toggleRead() - self.session.commit() + self.backend.toggleRead(self.selectedEntry) self.ui.feedEntryList.doItemsLayout() def markSelectedFeedRead(self): - self.selectedFeed.markAllEntriesRead() - self.session.commit() + self.backend.markAllEntriesRead(self.selectedFeed) self.ui.feedEntryList.doItemsLayout() def markSelectedEntriesRead(self): + entriesToBeMarked = [] selectedIndexes = self.ui.feedEntryList.selectedIndexes() for index in selectedIndexes: # selectedIndexes returns one QModelIndex instance per row/column combo. 
@@ -137,15 +133,13 @@ # column if index.column() == 0: row = index.row() - # use selectedEntry here to ensure it has a valid state after all - # selected entries are marked read - self.selectedEntry = self.feedEntries[row] - self.selectedEntry.markRead() - self.session.commit() + entriesToBeMarked.append(self.feedEntries[row]) + self.backend.markFeedEntriesAsRead(entriesToBeMarked) + self.selectedEntry = None self.ui.feedEntryList.doItemsLayout() def addFeed(self): - addFeed = AddFeed(self.session) + addFeed = AddFeed(self.backend) success = addFeed.exec_() if not success: return @@ -154,32 +148,29 @@ message = "Error while adding feed: " + str(addFeed.exception) self._updateStatusBar(message) else: - self.session.commit() self.updateFeedList() def deleteFeed(self): try: - self.session.delete(self.selectedFeed) - self.session.commit() + self.backend.deleteFeed(self.selectedFeed) self.updateFeedList() except Exception as exception: message = "Error while deleting feed: " + str(exception) self._updateStatusBar(message) def showPreferences(self): - preferences = PreferencesDialog(self.session) + preferences = PreferencesDialog(self.backend) preferences.exec_() def showFeedSettings(self): - feedSettings = FeedSettings(self.session, self.selectedFeed) + feedSettings = FeedSettings(self.selectedFeed, self.backend) feedSettings.exec_() def _updateStatusBar(self, message): self.ui.statusbar.showMessage(message, STATUS_MESSAGE_DISPLAY_MILLIS) def close(self): - # save all uncommitted state, just in case - self.session.commit() + self.backend.dispose() QMainWindow.close(self) def copyArticleURLToClipboard(self): diff -r dc0a82841a60 -r 1e6274cca035 Mapping.py --- a/Mapping.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,75 +0,0 @@ - -from Feed import Feed -from FeedEntry import FeedEntry -from Preference import Preference -from sqlalchemy import Boolean -from sqlalchemy import Column -from sqlalchemy import DateTime -from sqlalchemy import ForeignKey -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy import Text -from sqlalchemy.orm import mapper -from sqlalchemy.orm import relation - -mappingDefined = False -feedEntryTable = None - -def createMapping(engine): - """ Make sure the mapping is defined only once. This is not really needed for the feed updater - or the GUI app but comes in handy when working interactively with the system. """ - global mappingDefined - if not mappingDefined: - _createMapping(engine) - mappingDefined = True - -def _createMapping(engine): - metadata = MetaData(engine) - metadata.bind = engine - - feedTable = Table("feed", metadata, - Column("pk", Integer, primary_key=True), - Column("title", String(255), nullable=False), - Column("rss_url", String(255), nullable=False), - # update interval is specified in minutes - Column("update_interval", Integer, nullable=False), - Column("next_update", DateTime, nullable=False), - # when displaying an entry of this feed, do not display the summary but rather load - # the link directly - Column("auto_load_entry_link", Boolean, nullable=False), - # this is actually a hack: when opening some sites in the QWebView it just crashes. 
- # This setting forces to open an entry's link in the external browser - Column("always_open_in_browser", Boolean, nullable=False) - ) - - global feedEntryTable - feedEntryTable = Table("feed_entry", metadata, - Column("pk", Integer, primary_key=True), - Column("create_timestamp", DateTime, nullable=False), - Column("read", Integer, nullable=False), - - Column("id", String(512), nullable=False), - Column("link", String(512), nullable=False), - Column("title", Text, nullable=False), - Column("summary", Text, nullable=False), - Column("updated", DateTime), - Column("feed_id", Integer, ForeignKey("feed.pk")) - ) - - preferencesTable = Table("preference", metadata, - Column("pk", Integer, primary_key=True), - Column("key", String(255), nullable=False), - Column("value", String(255), nullable=False) - ) - - metadata.create_all() - - mapper(FeedEntry, feedEntryTable) - mapper(Feed, feedTable, - properties = { - "entries" : relation(FeedEntry, backref="feed", lazy=True, cascade="delete, delete-orphan") - } - ) - mapper(Preference, preferencesTable) diff -r dc0a82841a60 -r 1e6274cca035 Preference.py --- a/Preference.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,12 +0,0 @@ - -class Preference(object): - @staticmethod - def forKey(key, session): - return session.query(Preference).filter(Preference.key == key).first() - - def __init__(self, key, value): - self.key = key - self.value = value - - def __repr__(self): - return "" % (self.key, self.value) diff -r dc0a82841a60 -r 1e6274cca035 Preferences.py --- a/Preferences.py Sun Aug 21 02:44:13 2011 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ - -from Preference import Preference -import util - -DAYS_TO_KEEP_FEED_ENTRIES = "DAYS_TO_KEEP_FEED_ENTRIES" -HIDE_READ_ENTRIES = "HIDE_READ_FEED_ENTRIES" -PROXY_HOST = "PROXY_HOST" -PROXY_PORT = "PROXY_PORT" -SHOW_ONLY_UNREAD_FEEDS = "SHOW_ONLY_UNREAD_FEEDS" -START_MAXIMIZED = "START_MAXIMIZED" - -class Preferences(object): - def __init__(self, session): - self.session = session - self.cache = {} - - def _cachedPreference(self, key, defaultValue=None, addIfMissing=True): - if self.cache.has_key(key): - return self.cache[key] - else: - pref = Preference.forKey(key, self.session) - if pref is not None: - self.cache[key] = pref - elif pref is None and addIfMissing: - pref = Preference(key, str(defaultValue)) - self.session.add(pref) - self.cache[key] = pref - return pref - - def startMaximized(self): - pref = self._cachedPreference(START_MAXIMIZED, False) - return util.str2bool(pref.value) - - def setStartMaximized(self, flag): - pref = self._cachedPreference(START_MAXIMIZED) - pref.value = util.bool2str(flag) - - def hideReadFeedEntries(self): - pref = self._cachedPreference(HIDE_READ_ENTRIES, False) - return util.str2bool(pref.value) - - def setHideReadFeedEntries(self, flag): - pref = self._cachedPreference(HIDE_READ_ENTRIES) - pref.value = util.bool2str(flag) - - def showOnlyUnreadFeeds(self): - pref = self._cachedPreference(SHOW_ONLY_UNREAD_FEEDS, False) - return util.str2bool(pref.value) - - def setShowOnlyUnreadFeeds(self, flag): - pref = self._cachedPreference(SHOW_ONLY_UNREAD_FEEDS) - pref.value = util.bool2str(flag) - - def proxyHost(self): - pref = self._cachedPreference(PROXY_HOST) - return pref.value - - def setProxyHost(self, hostname): - if hostname is None: - pref = self._cachedPreference(PROXY_HOST, addIfMissing=False) - if pref is not None: - self.session.delete(pref) - del(self.cache[PROXY_HOST]) - else: - pref = 
self._cachedPreference(PROXY_HOST) - pref.value = str(hostname) - - def proxyPort(self): - pref = self._cachedPreference(PROXY_PORT, 3128) - return int(pref.value) - - def setProxyPort(self, port): - if port is None: - pref = self._cachedPreference(PROXY_PORT, addIfMissing=False) - if pref is not None: - self.session.delete(pref) - del(self.cache[PROXY_PORT]) - else: - pref = self._cachedPreference(PROXY_PORT) - pref.value = str(port) - - def isProxyConfigured(self): - pref = self._cachedPreference(PROXY_HOST, addIfMissing=False) - return pref is not None - - def daysToKeepFeedEntries(self): - pref = self._cachedPreference(DAYS_TO_KEEP_FEED_ENTRIES, 90, addIfMissing=True) - return int(pref.value) - - def setDaysToKeepFeedEntries(self, dayString): - pref = self._cachedPreference(DAYS_TO_KEEP_FEED_ENTRIES) - pref.value = dayString diff -r dc0a82841a60 -r 1e6274cca035 PreferencesDialog.py --- a/PreferencesDialog.py Sun Aug 21 02:44:13 2011 +0200 +++ b/PreferencesDialog.py Mon Aug 22 15:30:33 2011 +0200 @@ -1,13 +1,12 @@ -from Preferences import Preferences from PyQt4 import QtGui from Ui_Preferences import Ui_Preferences class PreferencesDialog(QtGui.QDialog): - def __init__(self, session=None): + def __init__(self, backend=None): QtGui.QWidget.__init__(self, None) - self.session = session - self.preferences = Preferences(session) + self.backend = backend + self.preferences = backend.preferences() self.ui = Ui_Preferences() self.ui.setupUi(self) self.fillUi() @@ -77,10 +76,10 @@ def accept(self): self.storeProxySettings() - self.session.commit() + self.preferences.commit(); QtGui.QDialog.accept(self) def reject(self): - self.session.rollback() + self.preferences.rollback() QtGui.QDialog.reject(self) diff -r dc0a82841a60 -r 1e6274cca035 backend/__init__.py diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/Database.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/Database.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,36 @@ + +from sqlalchemy.engine import create_engine +from sqlalchemy.orm import sessionmaker +import Mapping +import sys +import util + +# Keep the connection to the database only once. 
The feed updater and the GUI app will +# operate on a single engine/session but this comes in handy for interactive use +engine = None +SessionMaker = None + +def createSession(databaseUrl=None): + if databaseUrl is None: + databaseUrl = _getDatabaseUrl() + initEngine(databaseUrl) + Mapping.createMapping(engine) + initSessionMaker() + return SessionMaker() + +def _getDatabaseUrl(): + if len(sys.argv) < 2: + print("Usage: %s " % (sys.argv[0])) + sys.exit(1) + return sys.argv[1] + +def initEngine(databaseUrl): + global engine + if engine is None: + verbose = util.databaseLoggingEnabled() + engine = create_engine(databaseUrl, echo=verbose) + +def initSessionMaker(): + global SessionMaker + if SessionMaker is None: + SessionMaker = sessionmaker(bind=engine) diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/Feed.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/Feed.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,50 @@ + +from datetime import datetime, timedelta +import FeedEntry + +class Feed(object): + @staticmethod + def all(session): + return session.query(Feed).order_by(Feed.title).all() + + def __init__(self, title, rss_url): + self.title = title + self.rss_url = rss_url + # default: update every 60 minutes + self.update_interval = 60 + self.incrementNextUpdateDate() + self.auto_load_entry_link = False + self.always_open_in_browser = False + + def __repr__(self): + return "" % (self.pk, self.title) + + def userPresentableString(self): + return self.title + + def entriesSortedByUpdateDate(self, hideReadEntries=False): + if hideReadEntries: + sortedEntries = self._unreadEntries() + else: + sortedEntries = list(self.entries) + sortedEntries.sort(FeedEntry.compareByUpdateDate) + return sortedEntries + + def _unreadEntries(self): + retValue = [] + for entry in self.entries: + if not entry.read: + retValue.append(entry) + return retValue + + def incrementNextUpdateDate(self): + delta = timedelta(minutes=self.update_interval) + self.next_update = datetime.now() + delta + + def markAllEntriesRead(self): + for entry in self.entries: + entry.markRead() + + def takeChangesFrom(self, dict): + for key in dict.keys(): + setattr(self, key, dict[key]) diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/FeedEntry.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/FeedEntry.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,43 @@ + +from datetime import datetime + +def compareByUpdateDate(first, second): + return cmp(first.updated, second.updated) + +class FeedEntry(object): + @staticmethod + def findById(id, session): + result = session.query(FeedEntry).filter(FeedEntry.id == id) + return result.first() + + @staticmethod + def create(entry): + new = FeedEntry() + new.id = entry.id + new.link = entry.link + new.title = entry.title + new.updated = entry.updated_parsed + new.summary = entry.summary + return new + + def __init__(self): + self.create_timestamp = datetime.now() + self.read = 0 + + def __repr__(self): + return "" % (self.pk, self.title) + + def userPresentableString(self): + return self.title + + def toggleRead(self): + if self.read: + self.markUnread() + else: + self.markRead() + + def markRead(self): + self.read = 1 + + def markUnread(self): + self.read = 0 diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/FeedList.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/FeedList.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,23 @@ + +from Feed import Feed +from FeedEntry import FeedEntry +from Preferences import Preferences 
+from sqlalchemy.orm import joinedload + +def getFeeds(session): + preferences = Preferences(session) + if preferences.showOnlyUnreadFeeds(): + return _getUnreadFeeds(session) + else: + return Feed.all(session) + +def _getUnreadFeeds(session): + query = session.query(FeedEntry).filter(FeedEntry.read == 0) + queryWithOptions = query.options(joinedload("feed")) + result = queryWithOptions.all() + return _collectFeeds(result) + +def _collectFeeds(feedEntries): + feeds = [entry.feed for entry in feedEntries] + uniqueFeeds = set(feeds) + return list(uniqueFeeds) diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/FeedUpdater.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/FeedUpdater.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,84 @@ + +from datetime import datetime +from Feed import Feed +from FeedEntry import FeedEntry +import feedparser +import logging + +STATUS_ERROR = 400 +log = logging.getLogger("FeedUpdater") + +def updateAllFeeds(session): + allFeeds = findFeedsToUpdate(session) + for feed in allFeeds: + try: + FeedUpdater(session, feed).update() + except FeedUpdateException, fue: + log.warn("problems while updating feed " + feed.rss_url + ": " + str(fue)) + session.commit() + +def findFeedsToUpdate(session): + return session.query(Feed).filter(Feed.next_update < datetime.now()) + +def normalize(entry): + if not hasattr(entry, "id"): + entry.id = entry.link + if not hasattr(entry, "updated_parsed"): + entry.updated_parsed = datetime.today() + else: + entry.updated_parsed = datetime(*entry.updated_parsed[:6]) + if not hasattr(entry, "summary"): + if hasattr(entry, "content"): + entry.summary = entry.content[0].value + else: + entry.summary = "" + +def createNewFeed(url, session): + # when updating to python3 see http://code.google.com/p/feedparser/issues/detail?id=260 + result = feedparser.parse(url) + if result.has_key("title"): + title = result["feed"].title + else: + title = url + newFeed = Feed(title, url) + session.add(newFeed) + + FeedUpdater(session, newFeed).update() + + +class FeedUpdater(object): + def __init__(self, session, feed): + self.session = session + self.feed = feed + + def update(self): + log.info("updating " + self.feed.rss_url) + result = self.getFeed() + for entry in result.entries: + self.processEntry(entry) + self.feed.incrementNextUpdateDate() + + def getFeed(self): + result = feedparser.parse(self.feed.rss_url) + # bozo flags if a feed is well-formed. 
+# if result["bozo"] > 0: +# raise FeedUpdateException() + status = result["status"] + if status >= STATUS_ERROR: + raise FeedUpdateException("HTTP status " + str(status)) + return result + + def processEntry(self, entry): + normalize(entry) + feedEntry = FeedEntry.findById(entry.id, self.session) + if feedEntry is None: + self.createFeedEntry(entry) + + def createFeedEntry(self, entry): + new = FeedEntry.create(entry) + new.feed = self.feed + self.session.add(new) + log.info("new feed entry: " + entry.title) + +class FeedUpdateException(Exception): + pass diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/Mapping.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/Mapping.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,75 @@ + +from Feed import Feed +from FeedEntry import FeedEntry +from Preference import Preference +from sqlalchemy import Boolean +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import ForeignKey +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import Text +from sqlalchemy.orm import mapper +from sqlalchemy.orm import relation + +mappingDefined = False +feedEntryTable = None + +def createMapping(engine): + """ Make sure the mapping is defined only once. This is not really needed for the feed updater + or the GUI app but comes in handy when working interactively with the system. """ + global mappingDefined + if not mappingDefined: + _createMapping(engine) + mappingDefined = True + +def _createMapping(engine): + metadata = MetaData(engine) + metadata.bind = engine + + feedTable = Table("feed", metadata, + Column("pk", Integer, primary_key=True), + Column("title", String(255), nullable=False), + Column("rss_url", String(255), nullable=False), + # update interval is specified in minutes + Column("update_interval", Integer, nullable=False), + Column("next_update", DateTime, nullable=False), + # when displaying an entry of this feed, do not display the summary but rather load + # the link directly + Column("auto_load_entry_link", Boolean, nullable=False), + # this is actually a hack: when opening some sites in the QWebView it just crashes. 
+ # This setting forces to open an entry's link in the external browser + Column("always_open_in_browser", Boolean, nullable=False) + ) + + global feedEntryTable + feedEntryTable = Table("feed_entry", metadata, + Column("pk", Integer, primary_key=True), + Column("create_timestamp", DateTime, nullable=False), + Column("read", Integer, nullable=False), + + Column("id", String(512), nullable=False), + Column("link", String(512), nullable=False), + Column("title", Text, nullable=False), + Column("summary", Text, nullable=False), + Column("updated", DateTime), + Column("feed_id", Integer, ForeignKey("feed.pk")) + ) + + preferencesTable = Table("preference", metadata, + Column("pk", Integer, primary_key=True), + Column("key", String(255), nullable=False), + Column("value", String(255), nullable=False) + ) + + metadata.create_all() + + mapper(FeedEntry, feedEntryTable) + mapper(Feed, feedTable, + properties = { + "entries" : relation(FeedEntry, backref="feed", lazy=True, cascade="delete, delete-orphan") + } + ) + mapper(Preference, preferencesTable) diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/Preference.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/Preference.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,12 @@ + +class Preference(object): + @staticmethod + def forKey(key, session): + return session.query(Preference).filter(Preference.key == key).first() + + def __init__(self, key, value): + self.key = key + self.value = value + + def __repr__(self): + return "" % (self.key, self.value) diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/Preferences.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/Preferences.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,106 @@ + +from Preference import Preference + +DAYS_TO_KEEP_FEED_ENTRIES = "DAYS_TO_KEEP_FEED_ENTRIES" +HIDE_READ_ENTRIES = "HIDE_READ_FEED_ENTRIES" +PROXY_HOST = "PROXY_HOST" +PROXY_PORT = "PROXY_PORT" +SHOW_ONLY_UNREAD_FEEDS = "SHOW_ONLY_UNREAD_FEEDS" +START_MAXIMIZED = "START_MAXIMIZED" + +def str2bool(string): + return string.lower() in ["yes", "true", "t", "1"] + +def bool2str(bool): + if bool: + return "True" + else: + return "False" + +class Preferences(object): + def __init__(self, session): + self.session = session + self.cache = {} + + def _cachedPreference(self, key, defaultValue=None, addIfMissing=True): + if self.cache.has_key(key): + return self.cache[key] + else: + pref = Preference.forKey(key, self.session) + if pref is not None: + self.cache[key] = pref + elif pref is None and addIfMissing: + pref = Preference(key, str(defaultValue)) + self.session.add(pref) + self.cache[key] = pref + return pref + + def commit(self): + self.session.commit() + + def rollback(self): + self.session.rollback() + + def startMaximized(self): + pref = self._cachedPreference(START_MAXIMIZED, False) + return str2bool(pref.value) + + def setStartMaximized(self, flag): + pref = self._cachedPreference(START_MAXIMIZED) + pref.value = bool2str(flag) + + def hideReadFeedEntries(self): + pref = self._cachedPreference(HIDE_READ_ENTRIES, False) + return str2bool(pref.value) + + def setHideReadFeedEntries(self, flag): + pref = self._cachedPreference(HIDE_READ_ENTRIES) + pref.value = bool2str(flag) + + def showOnlyUnreadFeeds(self): + pref = self._cachedPreference(SHOW_ONLY_UNREAD_FEEDS, False) + return str2bool(pref.value) + + def setShowOnlyUnreadFeeds(self, flag): + pref = self._cachedPreference(SHOW_ONLY_UNREAD_FEEDS) + pref.value = bool2str(flag) + + def proxyHost(self): + pref = 
self._cachedPreference(PROXY_HOST) + return pref.value + + def setProxyHost(self, hostname): + if hostname is None: + pref = self._cachedPreference(PROXY_HOST, addIfMissing=False) + if pref is not None: + self.session.delete(pref) + del(self.cache[PROXY_HOST]) + else: + pref = self._cachedPreference(PROXY_HOST) + pref.value = str(hostname) + + def proxyPort(self): + pref = self._cachedPreference(PROXY_PORT, 3128) + return int(pref.value) + + def setProxyPort(self, port): + if port is None: + pref = self._cachedPreference(PROXY_PORT, addIfMissing=False) + if pref is not None: + self.session.delete(pref) + del(self.cache[PROXY_PORT]) + else: + pref = self._cachedPreference(PROXY_PORT) + pref.value = str(port) + + def isProxyConfigured(self): + pref = self._cachedPreference(PROXY_HOST, addIfMissing=False) + return pref is not None + + def daysToKeepFeedEntries(self): + pref = self._cachedPreference(DAYS_TO_KEEP_FEED_ENTRIES, 90, addIfMissing=True) + return int(pref.value) + + def setDaysToKeepFeedEntries(self, dayString): + pref = self._cachedPreference(DAYS_TO_KEEP_FEED_ENTRIES) + pref.value = dayString diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/SqlAlchemyBackend.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/backend/sqlalchemy/SqlAlchemyBackend.py Mon Aug 22 15:30:33 2011 +0200 @@ -0,0 +1,91 @@ + +from Preferences import Preferences +from datetime import datetime, timedelta +from sqlalchemy.sql import and_ +import Database +import FeedList +import FeedUpdater +import Mapping +import logging +import util + +class SqlAlchemyBackend(object): + ''' + Backend that uses sqlalchemy for persistence + ''' + + def __init__(self): + self._initLogging() + self.session = Database.createSession() + self.prefs = Preferences(self.session) + self.updater = None + + def _initLogging(self): + logging.getLogger("sqlalchemy.orm").setLevel(logging.WARN) + + sqlalchemyLogLevel = logging.ERROR + if util.databaseLoggingEnabled(): + sqlalchemyLogLevel = logging.INFO + logging.getLogger("sqlalchemy").setLevel(sqlalchemyLogLevel) + + def preferences(self): + return self.prefs + + def getFeeds(self): + return FeedList.getFeeds(self.session) + + def toggleRead(self, feedEntry): + feedEntry.toggleRead() + self.session.commit() + + def markAllEntriesRead(self, feed): + feed.markAllEntriesRead() + self.session.commit() + + def createFeed(self, url): + try: + FeedUpdater.createNewFeed(url, self.session) + self.session.commit() + except AttributeError as ae: + self.session.rollback() + raise ae + + def updateFeed(self, feed, changes): + feed.takeChangesFrom(changes) + feed.incrementNextUpdateDate() + self.session.commit() + + def deleteFeed(self, feed): + self.session.delete(feed) + self.session.commit() + + def markFeedEntriesAsRead(self, entries): + for entry in entries: + entry.markRead() + self.session.commit() + + def updateAllFeeds(self): + FeedUpdater.updateAllFeeds(self.session) + self.session.commit() + + def expireFeedEntries(self): + logger = logging.getLogger("feedupdater") + expireDate = self._calculateExpireDate() + logger.info("expiring entries older than " + str(expireDate)) + feedEntry = Mapping.feedEntryTable + deleteStatement = feedEntry.delete().where( + and_(feedEntry.c.create_timestamp < expireDate, feedEntry.c.read == 1) + ) + deleteStatement.execute() + self.session.commit() + + def _calculateExpireDate(self): + now = datetime.now() + daysToKeepFeedEntries = self.prefs.daysToKeepFeedEntries() + delta = timedelta(days=daysToKeepFeedEntries) + return now - delta + + def 
dispose(self):
+        # save all uncommitted state, just in case
+        self.session.commit()
+        self.session.close()
diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/__init__.py
diff -r dc0a82841a60 -r 1e6274cca035 backend/sqlalchemy/util.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/backend/sqlalchemy/util.py	Mon Aug 22 15:30:33 2011 +0200
@@ -0,0 +1,9 @@
+
+import sys
+
+def databaseLoggingEnabled():
+    loggingEnabled = False
+    for arg in sys.argv:
+        if arg == "--databaseLogging":
+            loggingEnabled = True
+    return loggingEnabled
diff -r dc0a82841a60 -r 1e6274cca035 feedupdate-main.py
--- a/feedupdate-main.py	Sun Aug 21 02:44:13 2011 +0200
+++ b/feedupdate-main.py	Mon Aug 22 15:30:33 2011 +0200
@@ -1,48 +1,11 @@
 #!/usr/bin/env python
 
-import Database
-from datetime import datetime, timedelta
-from Feed import Feed
-import FeedUpdater
 import logging
-import Mapping
-from Preferences import Preferences
-from sqlalchemy.sql import and_
-import util
-
-logger = logging.getLogger("feedupdater")
-
-def listFeeds(session):
-    allFeeds = session.query(Feed)
-    for feed in allFeeds:
-        logger.info("feed: " + feed.name)
-        for entry in feed.entries:
-            print entry.title
-
-def expireFeedEntries(session):
-    expireDate = _calculateExpireDate(session)
-    logger.info("expiring entries older than " + str(expireDate))
-    feedEntry = Mapping.feedEntryTable
-    deleteStatement = feedEntry.delete().where(
-        and_(feedEntry.c.create_timestamp < expireDate, feedEntry.c.read == 1)
-    )
-    deleteStatement.execute()
-
-def _calculateExpireDate(session):
-    now = datetime.now()
-    daysToKeepFeedEntries = Preferences(session).daysToKeepFeedEntries()
-    delta = timedelta(days=daysToKeepFeedEntries)
-    return now - delta
+from backend.sqlalchemy.SqlAlchemyBackend import SqlAlchemyBackend
 
 if __name__ == "__main__":
-    util.configureLogging()
-    session = Database.createSession()
-
-    #util.loadFeeds(session)
-    #util.forceUpdateAllFeeds(session)
-    #listFeeds(session)
-    #OpmlImport.createFeedsFromOPML(session, "feed-data/feeds.opml.xml")
-    FeedUpdater.updateAllFeeds(session)
-    expireFeedEntries(session)
-
-    session.close()
+    logging.basicConfig(level=logging.DEBUG)
+    backend = SqlAlchemyBackend()
+    backend.updateAllFeeds()
+    backend.expireFeedEntries()
+    backend.dispose()
diff -r dc0a82841a60 -r 1e6274cca035 feedworm-gui.py
--- a/feedworm-gui.py	Sun Aug 21 02:44:13 2011 +0200
+++ b/feedworm-gui.py	Mon Aug 22 15:30:33 2011 +0200
@@ -1,11 +1,10 @@
 
-import Database
 from MainWindow import MainWindow
-from Preferences import Preferences
 from PyQt4 import QtGui
 from PyQt4.QtNetwork import QNetworkProxy
+from backend.sqlalchemy.SqlAlchemyBackend import SqlAlchemyBackend
+import logging
 import sys
-import util
 
 def setupProxy(preferences):
     if preferences.isProxyConfigured():
@@ -15,14 +14,14 @@
         QNetworkProxy.setApplicationProxy(proxy)
 
 if __name__ == '__main__':
-    util.configureLogging()
-    session = Database.createSession()
-    preferences = Preferences(session)
+    logging.basicConfig(level=logging.DEBUG)
+    backend = SqlAlchemyBackend()
+    preferences = backend.preferences()
 
     setupProxy(preferences)
 
     app = QtGui.QApplication(sys.argv)
-    mainWindow = MainWindow(session)
+    mainWindow = MainWindow(backend)
 
     maximized = preferences.startMaximized()
     if maximized:
diff -r dc0a82841a60 -r 1e6274cca035 util.py
--- a/util.py	Sun Aug 21 02:44:13 2011 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,49 +0,0 @@
-
-from datetime import datetime, timedelta
-from Feed import Feed
-import logging
-import sys
-
-logger = logging.getLogger("database")
-
-def databaseLoggingEnabled():
-    loggingEnabled = False
-    for arg in sys.argv:
-        if arg == "--databaseLogging":
-            loggingEnabled = True
-    return loggingEnabled
-
-def configureLogging():
-    logging.basicConfig(level=logging.DEBUG)
-
-    sqlalchemyLogLevel = logging.ERROR
-    if databaseLoggingEnabled():
-        sqlalchemyLogLevel = logging.INFO
-    logging.getLogger("sqlalchemy").setLevel(sqlalchemyLogLevel)
-
-    logging.getLogger("sqlalchemy.orm").setLevel(logging.WARN)
-
-def loadFeeds(session=None, filename="feeds.txt"):
-    file = open(filename)
-    for line in file:
-        (title, rss_url) = line.split("|")
-        # remove the newline
-        rss_url = rss_url.rstrip()
-        feed = Feed(title, rss_url)
-        session.add(feed)
-    file.close()
-    session.commit()
-
-def forceUpdateAllFeeds(session=None):
-    for feed in Feed.all(session):
-        feed.next_update = datetime.now() - timedelta(minutes=1)
-    session.commit()
-
-def str2bool(string):
-    return string.lower() in ["yes", "true", "t", "1"]
-
-def bool2str(bool):
-    if bool:
-        return "True"
-    else:
-        return "False"
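The net effect of this merge is that the GUI classes (MainWindow, AddFeed, FeedSettings, PreferencesDialog) and the headless updater no longer hold a SQLAlchemy session; they only call methods on a backend object, and all commits and rollbacks happen inside backend/sqlalchemy/SqlAlchemyBackend.py. The sketch below collects the backend methods this changeset actually calls into one place, as a reference for anyone writing an alternative backend. It is illustrative only: the class name FeedBackend and the NotImplementedError stubs are not part of the changeset, and SqlAlchemyBackend is the only real implementation.

class FeedBackend(object):
    """Hypothetical summary of the backend surface the GUI relies on after this merge."""

    def preferences(self):
        """Return a Preferences-like object; PreferencesDialog also calls commit()/rollback() on it."""
        raise NotImplementedError

    def getFeeds(self):
        """Return the Feed objects shown in MainWindow's feed list."""
        raise NotImplementedError

    def createFeed(self, url):
        """Fetch the feed behind url and persist it (called from the AddFeed dialog)."""
        raise NotImplementedError

    def updateFeed(self, feed, changes):
        """Apply the dict of attribute changes collected by FeedSettings to feed."""
        raise NotImplementedError

    def deleteFeed(self, feed):
        """Remove feed and its entries."""
        raise NotImplementedError

    def toggleRead(self, feedEntry):
        """Flip the read flag of a single entry."""
        raise NotImplementedError

    def markAllEntriesRead(self, feed):
        """Mark every entry of feed as read."""
        raise NotImplementedError

    def markFeedEntriesAsRead(self, entries):
        """Mark a list of selected entries as read."""
        raise NotImplementedError

    def updateAllFeeds(self):
        """Fetch all feeds that are due for an update (used by feedupdate-main.py)."""
        raise NotImplementedError

    def expireFeedEntries(self):
        """Delete read entries older than the configured retention period."""
        raise NotImplementedError

    def dispose(self):
        """Flush pending state and release resources (MainWindow.close, end of feedupdate-main.py)."""
        raise NotImplementedError

Both entry points depend only on this surface: feedworm-gui.py constructs a SqlAlchemyBackend and hands it to MainWindow, while feedupdate-main.py just calls updateAllFeeds(), expireFeedEntries() and dispose().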