view feedupdate-main.py @ 4:e0199f383442

Retrieve a feed for the given URL and store its entries as feed_entry rows in the database
author Dirk Olmes <dirk@xanthippe.ping.de>
date Mon, 26 Apr 2010 19:33:07 +0200
parents 8a624ee48a74
children bfd47f55d85b

#!/usr/bin/env python

from ConfigParser import ConfigParser
from Feed import Feed
from FeedUpdater import updateAllFeeds
import Mapping
import socket
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

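# Read the SQLAlchemy connection URL for this host from database-<hostname>.ini.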
def loadDatabaseProperties():
    hostname = socket.gethostname()
    filename = "database-%s.ini" % hostname

    parser = ConfigParser()
    parser.read(filename)
    return parser.get("database", "url")
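
# Example of the per-host ini file read above (hypothetical values; the real
# database-<hostname>.ini is not part of this changeset):
#
#   [database]
#   url = sqlite:///feedupdate.db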

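# Insert a sample feed; useful for seeding an empty database (call is commented out below).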
def createFeed(session):
    newFeed = Feed("Joel on Software", "http://www.joelonsoftware.com/rss.xml")
    session.add(newFeed)
    session.commit()

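# Print every stored feed along with the titles of its entries.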
def listFeeds(session):
    allFeeds = session.query(Feed)
    for feed in allFeeds:
        print "\n\nfeed: " + feed.name
        for entry in feed.entries:
            print entry.title

if __name__ == "__main__":
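    # Wire up the engine, ORM mapping and session, then fetch new entries for all feeds.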
    databaseUrl = loadDatabaseProperties()
    engine = create_engine(databaseUrl, echo=True)
    Mapping.createMapping(engine)
    SessionMaker = sessionmaker(bind=engine)
    session = SessionMaker()
    
    #createFeed(session)
    updateAllFeeds(session)
    #listFeeds(session)
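
# Assuming a matching database-<hostname>.ini is present in the working directory
# (see the example above), the script is run directly:
#
#   python feedupdate-main.py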