Planning applications tracker for InLinkUK from BT kiosks. https://kiosks.adrianshort.org/
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 

116 lines
3.4 KiB

  1. #!/usr/bin/env ruby
require 'csv'
require 'fileutils'
require 'logger'
require 'pp'

require 'haml'
require 'scraperwiki'

require_relative '../lib/helpers'
  8. OUTPUT_DIR = '_site'
  9. VIEWS_DIR = File.join('views')
  10. LAYOUT_FN = File.join(VIEWS_DIR, 'layout.haml')
  11. def write_page(path_items, template, locals = {})
  12. dir = create_path(path_items)
  13. fn = File.join(dir, 'index.html')
  14. # https://stackoverflow.com/questions/6125265/using-layouts-in-haml-files-independently-of-rails
  15. html = Haml::Engine.new(File.read(LAYOUT_FN)).render do
  16. Haml::Engine.new(File.read(File.join(VIEWS_DIR, "#{template}.haml"))).render(Object.new, locals)
  17. end
  18. File.write(fn, html)
  19. @log.info fn
  20. @pages += 1
  21. # TODO - add page to sitemap.xml or sitemap.txt
  22. # https://support.google.com/webmasters/answer/183668?hl=en&ref_topic=4581190
  23. end
  24. def write_csv(path_items, filename, data)
  25. dir = create_path(path_items)
  26. fn = File.join(dir, filename + '.csv')
  27. csv_string = CSV.generate do |csv|
  28. csv << data.first.keys # header row
  29. data.each { |row| csv << row.values }
  30. end
  31. File.write(fn, csv_string)
  32. @log.info fn
  33. end
  34. def create_path(path_items)
  35. dir = File.join(OUTPUT_DIR, path_items)
  36. FileUtils.mkdir_p(dir)
  37. @log.debug dir
  38. dir
  39. end
  40. def create_output_dir
  41. # Recursively delete working directory to ensure no redundant files are left behind from previous builds.
  42. # FileUtils.rm_rf(@working_dir)
  43. Dir.mkdir(@working_dir) unless File.directory?(@working_dir)
  44. # Dir.chdir(@working_dir)
  45. # Copy `public` dir to output dir
  46. FileUtils.copy_entry('public', @working_dir)
  47. end
  48. def gen_homepage
  49. summary = ScraperWiki.select("
  50. authority_name, status, decision, appeal_status, appeal_decision, count(*) as applications
  51. from applications
  52. group by authority_name, status, decision, appeal_status, appeal_decision
  53. ")
  54. write_page('.', 'index', { summary: summary })
  55. # Summary CSV file
  56. write_csv('.', 'inlink-summary', summary)
  57. # Full CSV file
  58. apps = ScraperWiki.select("* from applications")
  59. write_csv('.', 'inlink-full', apps)
  60. end
  61. def gen_new
  62. apps = ScraperWiki.select("* from `applications` order by date_received desc limit 30")
  63. write_page('new-applications', 'new', { apps: apps })
  64. end
  65. def gen_decisions
  66. apps = ScraperWiki.select("* from `applications` order by date_decision desc limit 30")
  67. write_page('decisions', 'decisions', { apps: apps })
  68. end
  69. def gen_authorities
  70. auths = ScraperWiki.select("distinct(authority_name) as authority_name from applications order by authority_name")
  71. write_page('authorities', 'authorities', { auths: auths })
  72. auths.each do |auth|
  73. summary = ScraperWiki.select("
  74. status, decision, appeal_status, appeal_decision, count(*) as qty
  75. from applications
  76. where authority_name = '#{auth['authority_name']}'
  77. group by status, decision, appeal_status, appeal_decision
  78. ")
  79. apps = ScraperWiki.select("* from applications where authority_name='#{auth['authority_name']}' order by date_received")
  80. write_page(['authorities', slug(auth['authority_name'])], 'authority', { apps: apps, auth: auth, summary: summary })
  81. write_csv(['authorities', slug(auth['authority_name'])], slug(auth['authority_name']), apps)
  82. end
  83. end
  84. @working_dir = File.join(Dir.pwd, OUTPUT_DIR)
  85. puts @working_dir
  86. # exit
  87. @log = Logger.new($stdout)
  88. @log.level = Logger::INFO
  89. @pages = 0
  90. create_output_dir
  91. gen_homepage
  92. gen_new
  93. gen_decisions
  94. gen_authorities