Commits

Janto Dreijer committed 2f62e3e

add rss feed

  • Parent commits b28fd3a


Files changed (3)
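The commit registers a new /feed route (see the routing table in the diff below) that serves recent release files as RSS 2.0. One quick way to sanity-check the endpoint once deployed, assuming the third-party feedparser library is available (a hypothetical consumer snippet, not part of this commit):

	import feedparser  # assumption: feedparser is installed; the commit itself does not use it

	feed = feedparser.parse("http://scikits.appspot.com/feed")
	for entry in feed.entries:
		# one entry per recently released file
		print("%s %s" % (entry.title, entry.link))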

 		search_box_html = SearchPage.search_box()
 
 		# latest changes
-		listing_urls = [] # the urls checked for updates
+		checked_urls = [] # the urls checked for updates
 		show_latest_changes = 1
 		newest_packages_html = ""
 		if show_latest_changes:
 			newest_packages_html = []
 
-			oldest = datetime.datetime.fromtimestamp(time.time() - SECONDS_IN_WEEK * 3)
-			news_items = []
 			for package in Package.packages().values():
-				first_char = package.name[0]
-				package_name = package.name
 				short_name = package.info()["short_name"]
-
-				package_news_items = []
-				for dist in ["2.5", "2.6", "3.0", "any", "source"]: # check various distributions
-					url = "http://pypi.python.org/packages/%(dist)s/%(first_char)s/%(package_name)s/" % locals()
-					listing_urls.append(url) # remember for forcing fetch
-					#~ items = fetch_links_with_dates(url, FETCH_CACHE_AGE*random.uniform(0.5, 1.5))
-					items = fetch_links_with_dates(url, FETCH_CACHE_AGE)
-					if items is None:
-						continue
-					package_news_items.extend([(name, _url, t) for name, _url, t in items if oldest < t])
-				package_news_items.sort(key=lambda c: (c[-1], name))
-
+				package_news_items, _checked_urls = package.release_files(return_checked_urls=True)
+				checked_urls.extend(_checked_urls)
 				if package_news_items:
 					actions = ", ".join(name for name, _url, t in package_news_items)
-
 					newest_packages_html.append('<a href="/%(short_name)s" title="%(actions)s">%(short_name)s</a><br />\n' % locals())
 
 			newest_packages_html = "\n".join(sorted(newest_packages_html)[:5])
 		n = memcache.get(key)
 		if n is None:
 			n = 0
-		memcache.set(key, (n+1) % len(listing_urls)) # set the next url to be fetched
-		url = listing_urls[n]
+		memcache.set(key, (n+1) % len(checked_urls)) # set the next url to be fetched
+		url = checked_urls[n]
 		self.logger.info("forcing fetch of url: %s" % url)
 		newest_packages_html += "<!-- forced fetch of url : %s -->\n" % url
-		get_url(url, cache_duration=FETCH_CACHE_AGE, force_fetch=True)
+		get_url(url, force_fetch=True, cache_duration=PACKAGE_NEWS_CACHE_DURATION)
 
 
 		# admin sidebar
 		self.write(get_template("contribute_page") % locals())
 		self.print_footer()
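The forced-fetch block above cycles through the checked URLs one per request, so every listing URL is eventually refreshed without hammering PyPI on any single page load. Distilled to its core, the round-robin pattern is (a standalone sketch under the memcache semantics used above; next_index is a hypothetical name):

	def next_index(key, n_urls):
		# Round-robin counter kept in memcache. A race between two
		# concurrent requests just refetches the same URL twice,
		# which is harmless here.
		n = memcache.get(key)
		if n is None:
			n = 0
		memcache.set(key, (n + 1) % n_urls)
		return n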
 
-
 class PackagesPage(Page):
 
 	name = "scikits"
 		self.name = name
 		self.repo_url = repo_url
 
+	def release_files(self, return_checked_urls=False):
+		first_char = self.name[0]
+		package_name = self.name
+		short_name = self.info()["short_name"]
+
+		oldest = datetime.datetime.fromtimestamp(time.time() - SECONDS_IN_WEEK * 3)
+
+		package_news_items = []
+		checked_urls = []
+		for dist in ["2.5", "2.6", "3.0", "any", "source"]: # check various distributions
+			url = "http://pypi.python.org/packages/%(dist)s/%(first_char)s/%(package_name)s/" % locals()
+			checked_urls.append(url) # remember for forcing fetch
+			items = fetch_links_with_dates(url, cache_duration=PACKAGE_NEWS_CACHE_DURATION)
+			if items is None:
+				continue
+			package_news_items.extend([(name, _url, t) for name, _url, t in items if oldest < t])
+		package_news_items.sort(key=lambda c: (c[-1], c[0])) # oldest first
+
+		if return_checked_urls:
+			return package_news_items, checked_urls
+
+		return package_news_items
+
 	@classmethod
 	def packages(self):
 		packages, expired = Cache.get("packages")
 					package = Package(name=package_name, repo_url=repo_url)
 					packages[package.name] = package
 
-			assert Cache.set(key="packages", value=packages, duration=FETCH_CACHE_AGE), package
+			assert Cache.set(key="packages", value=packages, duration=PACKAGE_LISTING_CACHE_DURATION), package
 
 		return packages
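A usage sketch of the new release_files API, with names taken from the diff above (hypothetical snippet):

	for package in Package.packages().values():
		items, urls = package.release_files(return_checked_urls=True)
		for name, url, t in items:  # (file name, file url, datetime), oldest first
			print("%s %s %s" % (name, url, t))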
 
 			revision="",
 			people="",
 			)
-		doap_result = get_url("http://pypi.python.org/pypi?:action=doap&name=%s" % self.name, force_fetch=force_fetch)
+		doap_result = get_url(
+			"http://pypi.python.org/pypi?:action=doap&name=%s" % self.name,
+			force_fetch=force_fetch,
+			cache_duration=PACKAGE_INFO_CACHE_DURATION,
+			)
 		if doap_result.status_code == 200:
 
 			doap_text = doap_result.content
 		self.write('<a href="%s">sign out</a>.' % users.create_logout_url("/admin"))
 		self.write("</p>")
 
-		key = "next_package_fetch_index"
-		self.write("<h2>%s</h2>" % key)
-		self.write(memcache.get(key))
-
 		# memcache management
 		self.write("<h2>memcache</h2>")
 		if self.request.get("clear_memcache"):
 			memcache.flush_all()
 			self.write("<p><strong>flushed memcache</strong></p>")
+
 		self.write("""
 <p>
 %s
 </p>
 		""" % memcache.get_stats())
 
+		key = "next_package_fetch_index"
+		self.write("<h3>%s</h3>" % key)
+		self.write(memcache.get(key))
+
+		key = "next_listing_url_index"
+		self.write("<h3>%s</h3>" % key)
+		self.write(memcache.get(key))
+
 		self.print_footer()
 
 	post = get
 	def get(self):
 		return
 
+class RSSFeedPage(Page):
+	def get(self):
+
+		items = []
+		for package in Package.packages().values():
+			d = package.info()
+			short_name = d["short_name"]
+			for (name, url, t) in package.release_files():
+				rss_item = PyRSS2Gen.RSSItem(
+					title = name,
+					link = "http://scikits.appspot.com/%s" % short_name,
+					description = 'Released file: <a href="%(url)s">%(url)s</a>' % locals(),
+					guid = PyRSS2Gen.Guid("http://scikits.appspot.com/%(short_name)s?feed_update=%(name)s" % locals()),
+					pubDate = t)
+				items.append(rss_item)
+
+		rss = PyRSS2Gen.RSS2(
+			title = "SciKits",
+			link = "http://scikits.appspot.com/",
+			description = "Updates to SciKits release files",
+			lastBuildDate = datetime.datetime.now(),
+			items = items)
+
+		self.write(rss.to_xml())
+
 application = webapp.WSGIApplication([
 	('/', MainPage),
 
 	('/debug', DebugPage),
 	('/edit', EditPage),
 	('/robots.txt', RobotsPage),
+	('/feed', RSSFeedPage),
 
 	('/(.+)', PackageInfoPage),
 	], debug=True)
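Note that '/feed' is registered before the catch-all '/(.+)' route; otherwise PackageInfoPage would treat "feed" as a package name. For reference, a standalone sketch of the PyRSS2Gen calls the handler relies on (hypothetical data, mirroring the usage above):

	import datetime
	import PyRSS2Gen

	item = PyRSS2Gen.RSSItem(
		title="scikits.example 0.1",  # hypothetical package/file name
		link="http://scikits.appspot.com/example",
		description="Released file",
		guid=PyRSS2Gen.Guid("http://scikits.appspot.com/example?feed_update=0.1"),
		pubDate=datetime.datetime.now())

	rss = PyRSS2Gen.RSS2(
		title="SciKits",
		link="http://scikits.appspot.com/",
		description="Updates to SciKits release files",
		lastBuildDate=datetime.datetime.now(),
		items=[item])

	print(rss.to_xml())  # RSS 2.0 XML, as served by RSSFeedPage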

code/templates.py

 </p>
 
 <p>
-<img src="/static/images/download_large.png" width="16" border="0" /> Download:  %(download_link)s <br />
+<img src="/static/images/download_32.png" width="16" border="0" /> Download:  %(download_link)s <br />
 Homepage: <a href="%(homepage)s">%(homepage)s</a> <br />
 PyPI: <a href="http://pypi.python.org/pypi/%(name)s">http://pypi.python.org/pypi/%(name)s</a> <br />
 Source Repository: <a href="%(repo_url)s">%(repo_url)s</a> <br />
 
 import xmlrpclib
 
+import PyRSS2Gen
+
 # set up locations
 ROOT = os.path.dirname(__file__)
 ON_DEV_SERVER = os.environ.get("SERVER_SOFTWARE", "dev").lower().startswith("dev")
 SECONDS_IN_MONTH = SECONDS_IN_DAY * 28
 
 # how often new data needs to be loaded
-FETCH_CACHE_AGE = SECONDS_IN_HOUR * 2
+PACKAGE_INFO_CACHE_DURATION = SECONDS_IN_HOUR * 2
+PACKAGE_NEWS_CACHE_DURATION = SECONDS_IN_DAY * 2
+PACKAGE_LISTING_CACHE_DURATION = SECONDS_IN_HOUR * 2
 
 import time
 
 		timeout = (time.time()+duration) if duration is not None else None
 		return memcache.set(key=key, value=(value, timeout))
 
-def get_url(url, force_fetch=False, cache_duration=FETCH_CACHE_AGE):
-	if cache_duration is None:
-		cache_duration = FETCH_CACHE_AGE
+def get_url(url, force_fetch=False, cache_duration=None):
 	response, expired = Cache.get(url)
 	if expired or force_fetch:
 		logger.debug("fetching %s" % url)
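With this change, get_url defaults cache_duration to None instead of coercing it to FETCH_CACHE_AGE, so each call site picks its own duration. The expiry convention follows from Cache.set above: a stored timeout of None never expires. Cache.get itself is not shown in this diff; a hedged reconstruction consistent with Cache.set (an assumption, not the repository's actual code):

	class Cache(object):
		@staticmethod
		def get(key):
			# Returns (value, expired). The stored tuple is
			# (value, absolute_timeout), matching Cache.set above;
			# a timeout of None means "never expires".
			cached = memcache.get(key)
			if cached is None:
				return None, True
			value, timeout = cached
			expired = timeout is not None and time.time() > timeout
			return value, expired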