Planning applications tracker for InLinkUK from BT kiosks. https://kiosks.adrianshort.org/
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 

80 lines
2.7 KiB

  1. require 'scraperwiki'
  2. require 'petrify'
  3. require 'csv'
  4. class Site
  5. def self.generate
  6. # Home page
  7. summary = ScraperWiki.select("
  8. authority_name, status, decision, appeal_status, appeal_decision, count(*) as applications
  9. from applications
  10. group by authority_name, status, decision, appeal_status, appeal_decision
  11. ")
  12. q = ScraperWiki.select("
  13. scraped_at
  14. from applications
  15. order by scraped_at desc
  16. limit 1")
  17. last_updated = DateTime.parse(q[0]['scraped_at'])
  18. path = '.'
  19. Petrify.page(path, 'index', { summary: summary, last_updated: last_updated })
  20. Petrify.csv(path, 'inlink-summary', summary)
  21. # New applications page
  22. apps = ScraperWiki.select("* from `applications` order by date_received desc limit 60")
  23. Petrify.page('new-applications', 'new-applications', { apps: apps })
  24. # Latest decisions page
  25. apps = ScraperWiki.select("* from `applications` order by date_decision desc limit 60")
  26. path = 'decisions'
  27. Petrify.page(path, 'decisions', { apps: apps })
  28. Petrify.csv(path, 'inlink-decisions', apps)
  29. # Appeals page
  30. summary = ScraperWiki.select("
  31. authority_name, appeal_status, appeal_decision, count(*) as applications
  32. from applications
  33. where appeal_status is not null
  34. and appeal_status != 'Unknown'
  35. group by authority_name, appeal_status, appeal_decision
  36. ")
  37. apps = ScraperWiki.select("
  38. * from applications
  39. where appeal_status is not null
  40. and appeal_status != 'Unknown'
  41. ")
  42. path = 'appeals'
  43. Petrify.page(path, 'appeals', { summary: summary, apps: apps })
  44. Petrify.csv(path, 'inlink-appeals', apps)
  45. # Media page
  46. media_items = CSV.read('media.csv', :headers => true )
  47. Petrify.page('media', 'media', { media_items: media_items })
  48. # Authority pages
  49. auths = ScraperWiki.select("distinct(authority_name) as authority_name from applications")
  50. auths.each do |auth|
  51. summary = ScraperWiki.select("
  52. status, decision, appeal_status, appeal_decision, count(*) as qty
  53. from applications
  54. where authority_name = ?
  55. group by status, decision, appeal_status, appeal_decision
  56. ", auth['authority_name'])
  57. apps = ScraperWiki.select("* from applications where authority_name = ? order by date_received desc", auth['authority_name'])
  58. stories = media_items.select { |s| s[4].match(auth['authority_name'])}
  59. path = ['authorities', slug(auth['authority_name'])]
  60. Petrify.page(path, 'authority', { apps: apps, auth: auth, summary: summary, stories: stories })
  61. Petrify.csv(path, slug(auth['authority_name']), apps)
  62. end
  63. end
  64. end