- require 'scraperwiki'
- require_relative 'petrify' # assumed: Petrify is a small local helper, sketched below
-
- class Site
-   def self.generate
-     # Home page
-     summary = ScraperWiki.select("
-       authority_name, status, decision, appeal_status, appeal_decision, count(*) as applications
-       from applications
-       group by authority_name, status, decision, appeal_status, appeal_decision
-     ")
-     Petrify.page('.', 'index', { summary: summary })
-
-     # Summary CSV file
-     Petrify.csv('.', 'inlink-summary', summary)
-
-     # New applications page
-     apps = ScraperWiki.select("* from applications order by date_received desc limit 30")
-     Petrify.page('new-applications', 'new-applications', { apps: apps })
-
-     # Latest decisions page
-     apps = ScraperWiki.select("* from applications order by date_decision desc limit 30")
-     Petrify.page('decisions', 'decisions', { apps: apps })
-
-     # Page and CSV file for each authority
-     auths = ScraperWiki.select("distinct(authority_name) as authority_name from applications")
-
-     auths.each do |auth|
-       summary = ScraperWiki.select("
-         status, decision, appeal_status, appeal_decision, count(*) as qty
-         from applications
-         where authority_name = ?
-         group by status, decision, appeal_status, appeal_decision
-       ", auth['authority_name'])
-
-       apps = ScraperWiki.select("* from applications where authority_name = ? order by date_received", auth['authority_name'])
-       path = ['authorities', slug(auth['authority_name'])]
-       Petrify.page(path, 'authority', { apps: apps, auth: auth, summary: summary })
-       Petrify.csv(path, slug(auth['authority_name']), apps)
-     end
-   end
-
-   # Assumed helper (not shown in the original): turns an authority name into a
-   # URL-safe slug, e.g. "City of Westminster" -> "city-of-westminster".
-   def self.slug(str)
-     str.downcase.gsub(/[^a-z0-9]+/, '-').gsub(/\A-+|-+\z/, '')
-   end
- end
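`Petrify` is not part of the `scraperwiki` gem; it is a small local helper that renders a template into an `index.html` and dumps query results to CSV. As a rough sketch of what such a helper could look like (the `views/` template directory, `docs/` output directory, and method bodies here are assumptions, not the project's actual implementation):

```ruby
require 'csv'
require 'erb'
require 'fileutils'

# Minimal sketch of the assumed Petrify helper. The "views" template
# directory and "docs" output directory are guesses, not original names.
class Petrify
  OUTPUT_DIR = 'docs'.freeze

  # Render views/<template>.erb with the given locals and write the result
  # to <path>/index.html under the output directory.
  def self.page(path, template, locals = {})
    dir = File.join(OUTPUT_DIR, *Array(path))
    FileUtils.mkdir_p(dir)
    File.write(File.join(dir, 'index.html'), render(template, locals))
  end

  # Write rows (an array of hashes, as ScraperWiki.select returns) to
  # <path>/<filename>.csv, using the hash keys as the header row.
  def self.csv(path, filename, rows)
    return if rows.nil? || rows.empty?
    dir = File.join(OUTPUT_DIR, *Array(path))
    FileUtils.mkdir_p(dir)
    CSV.open(File.join(dir, "#{filename}.csv"), 'w') do |out|
      out << rows.first.keys
      rows.each { |row| out << row.values }
    end
  end

  # Expose each local as a variable the ERB template can reference directly.
  def self.render(template, locals)
    b = binding
    locals.each { |name, value| b.local_variable_set(name, value) }
    ERB.new(File.read(File.join('views', "#{template}.erb"))).result(b)
  end
  private_class_method :render
end
```

With something like this in `petrify.rb`, running `Site.generate` leaves behind a tree of static pages and CSV downloads that any static web server can serve as-is.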