feedscraper.py

  1. """A simple web server that accepts POSTS containing a list of feed urls,
  2. and returns the titles of those feeds.
  3. """
  4. import eventlet
  5. feedparser = eventlet.import_patched('feedparser')
  6. # the pool provides a safety limit on our concurrency
  7. pool = eventlet.GreenPool()


def fetch_title(url):
    d = feedparser.parse(url)
    return d.feed.get('title', '')


def app(environ, start_response):
    if environ['REQUEST_METHOD'] != 'POST':
        start_response('403 Forbidden', [])
        return []

    # the pile collects the result of a concurrent operation -- in this case,
    # the collection of feed titles
    pile = eventlet.GreenPile(pool)
    for line in environ['wsgi.input'].readlines():
        # the request body arrives as bytes; decode each line to get a url string
        url = line.strip().decode()
        if url:
            pile.spawn(fetch_title, url)
    # since the pile is an iterator over the results,
    # you can use it in all sorts of great Pythonic ways
    titles = '\n'.join(pile)
    start_response('200 OK', [('Content-type', 'text/plain')])
    # WSGI response bodies must be iterables of bytes
    return [titles.encode()]


if __name__ == '__main__':
    from eventlet import wsgi
    wsgi.server(eventlet.listen(('localhost', 9010)), app)
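
To exercise the server, a request only needs to POST a newline-separated list of feed URLs to http://localhost:9010/. Below is a minimal client sketch for illustration, not part of the original example: the fetch_titles helper and the feed URL are placeholders of my own, and it assumes the server above is already running.

import urllib.request


def fetch_titles(urls, server='http://localhost:9010/'):
    # hypothetical helper for illustration: POST the urls, one per line,
    # and return the plain-text titles the server sends back
    body = '\n'.join(urls).encode()
    req = urllib.request.Request(server, data=body, method='POST')
    with urllib.request.urlopen(req) as resp:
        return resp.read().decode()


if __name__ == '__main__':
    # placeholder feed URL; substitute any feeds you want to query
    print(fetch_titles(['https://planet.python.org/rss20.xml']))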