Planning applications tracker for InLinkUK from BT kiosks. https://kiosks.adrianshort.org/
require 'scraperwiki'
require 'petrify'
require 'csv'
require 'json'
require 'date' # for DateTime.parse

class Site
  def self.generate
    # Home page: application counts grouped by authority and status.
    # ScraperWiki.select takes the SQL with the leading SELECT keyword omitted.
    summary = ScraperWiki.select("
      authority_name, status, decision, appeal_status, appeal_decision,
      count(*) as applications
      from applications
      group by authority_name, status, decision, appeal_status, appeal_decision
    ")

    q = ScraperWiki.select("
      scraped_at
      from applications
      order by scraped_at desc
      limit 1")
    last_updated = DateTime.parse(q[0]['scraped_at'])

    path = '.'
    Petrify.page(path, 'index',
      { summary: summary, last_updated: last_updated })
    Petrify.csv(path, 'inlink-summary', summary)

    # Generate a JSON file with all the data
    apps = ScraperWiki.select("* from applications")
    Petrify.file(path, 'data.json', apps.to_json)

    # New applications page
    apps = ScraperWiki.select("* from `applications`
      order by date_received desc limit 60")
    Petrify.page('new-applications', 'new-applications',
      { apps: apps, title: "New applications" })

    # Latest decisions page
    apps = ScraperWiki.select("* from `applications`
      order by date_decision desc limit 60")
    path = 'decisions'
    Petrify.page(path, 'decisions', { apps: apps, title: "Latest decisions" })
    Petrify.csv(path, 'inlink-decisions', apps)

    # Appeals page: anything with a known appeal status, or a status that
    # mentions an appeal (case-insensitive).
    summary = ScraperWiki.select("
      authority_name, status, decision, appeal_status, appeal_decision,
      count(*) as applications
      from applications
      where (appeal_status is not null
        and appeal_status != 'Unknown')
        or status like '%appeal%'
      group by authority_name, appeal_status, appeal_decision
      collate nocase
    ")
    apps = ScraperWiki.select("
      * from applications
      where (appeal_status is not null
        and appeal_status != 'Unknown')
        or status like '%appeal%'
      collate nocase
    ")
    path = 'appeals'
    Petrify.page(path, 'appeals', { summary: summary, apps: apps, title: "Appeals" })
    Petrify.csv(path, 'inlink-appeals', apps)

    # Media page
    stories = CSV.read('media.csv', headers: true)
    Petrify.page('media', 'media', { stories: stories, title: "Media" })

    # Authority pages: one page and one CSV per local authority.
    auths = ScraperWiki.select("distinct(authority_name) as authority_name
      from applications")
    auths.each do |auth|
      summary = ScraperWiki.select("
        status, decision, appeal_status, appeal_decision, count(*) as qty
        from applications
        where authority_name = ?
        group by status, decision, appeal_status, appeal_decision
      ", auth['authority_name'])
      apps = ScraperWiki.select("* from applications where authority_name = ?
        order by date_received desc", auth['authority_name'])
      # Media stories tagged with this authority in media.csv
      this_stories = stories.select do |story|
        story['authorities'] && story['authorities'].match(auth['authority_name'])
      end
      path = ['authorities', slug(auth['authority_name'])]
      Petrify.page(path, 'authority',
        { apps: apps, auth: auth, summary: summary, stories: this_stories,
          title: auth['authority_name'] })
      Petrify.csv(path, slug(auth['authority_name']), apps)
    end
  end
end
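
The slug helper used to build authority paths is not defined in this file; it presumably lives alongside the Petrify helpers elsewhere in the repo. A minimal sketch of the assumed behaviour (hypothetical, not the repo's actual implementation), followed by how the generator might be run:

# Hypothetical stand-in for the slug helper assumed above:
# lower-case, alphanumerics only, hyphen-separated
# ("City of London" => "city-of-london").
def slug(s)
  s.downcase.gsub(/[^a-z0-9]+/, '-').gsub(/(^-|-$)/, '')
end

# Build the whole static site:
Site.generate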