diff --git a/lib/site.rb b/lib/site.rb
index 846cbcd..4edbc09 100644
--- a/lib/site.rb
+++ b/lib/site.rb
@@ -3,15 +3,16 @@ require 'scraperwiki'
 class Site
   def self.generate
     # Home page
+    path = '.'
+
     summary = ScraperWiki.select("
       authority_name, status, decision, appeal_status, appeal_decision,
       count(*) as applications
       from applications
       group by authority_name, status, decision, appeal_status, appeal_decision
     ")
-    Petrify.page('.', 'index', { summary: summary })
-    # Summary CSV file
-    Petrify.csv('.', 'inlink-summary', summary)
+    Petrify.page(path, 'index', { summary: summary })
+    Petrify.csv(path, 'inlink-summary', summary)
 
     # New applications page
     apps = ScraperWiki.select("* from `applications` order by date_received desc limit 30")
@@ -22,6 +23,8 @@ class Site
     Petrify.page('decisions', 'decisions', { apps: apps })
     # Appeals page
+    path = 'appeals'
+
     summary = ScraperWiki.select("
       authority_name, appeal_status, appeal_decision,
       count(*) as applications
       from applications
@@ -36,13 +39,15 @@ class Site
       and appeal_status != 'Unknown'
     ")
-    Petrify.page('appeals', 'appeals', { summary: summary, apps: apps })
-    Petrify.csv('appeals', 'inlink-appeals', apps)
+    Petrify.page(path, 'appeals', { summary: summary, apps: apps })
+    Petrify.csv(path, 'inlink-appeals', apps)
 
-    # Page and CSV file for each authority
+    # Authority pages
     auths = ScraperWiki.select("distinct(authority_name) as authority_name from applications")
 
     auths.each do |auth|
+      path = ['authorities', slug(auth['authority_name'])]
+
       summary = ScraperWiki.select("
         status, decision, appeal_status, appeal_decision,
         count(*) as qty
         from applications
@@ -51,7 +56,7 @@ class Site
       ", auth['authority_name'])
 
       apps = ScraperWiki.select("* from applications where authority_name = ? order by date_received", auth['authority_name'])
-      path = ['authorities', slug(auth['authority_name'])]
+
       Petrify.page(path, 'authority', { apps: apps, auth: auth, summary: summary })
       Petrify.csv(path, slug(auth['authority_name']), apps)
     end