Planning applications tracker for InLinkUK from BT kiosks. https://kiosks.adrianshort.org/

build

#!/usr/bin/env ruby
require 'scraperwiki'
require 'haml'
require 'fileutils'
require 'pp'
require 'logger'
require_relative '../lib/helpers'

OUTPUT_DIR = '_site'
VIEWS_DIR = File.join('views')
LAYOUT_FN = File.join(VIEWS_DIR, 'layout.haml')

# Render a Haml template inside the shared layout and write it to
# <OUTPUT_DIR>/<path_items>/index.html.
def write_page(path_items, template, locals = {})
  dir = File.join(OUTPUT_DIR, path_items)
  FileUtils.mkdir_p(dir)
  @log.debug dir
  fn = File.join(dir, 'index.html')

  # https://stackoverflow.com/questions/6125265/using-layouts-in-haml-files-independently-of-rails
  html = Haml::Engine.new(File.read(LAYOUT_FN)).render do
    Haml::Engine.new(File.read(File.join(VIEWS_DIR, "#{template}.haml"))).render(Object.new, locals)
  end

  File.write(fn, html)
  @log.info fn
  @pages += 1
  # TODO: add page to sitemap.xml or sitemap.txt
  # https://support.google.com/webmasters/answer/183668?hl=en&ref_topic=4581190
end

def create_output_dir
  # Recursively delete the working directory to ensure no redundant files
  # are left behind from previous builds.
  # FileUtils.rm_rf(@working_dir)
  Dir.mkdir(@working_dir) unless File.directory?(@working_dir)
  # Dir.chdir(@working_dir)

  # Copy the `public` dir to the output dir.
  FileUtils.copy_entry('public', @working_dir)
end

def gen_homepage
  decisions = ScraperWiki.select(
    "* from `applications` order by date_decision desc limit 20")
  write_page('.', 'index', { decisions: decisions })
end

def gen_authorities
  auths = ScraperWiki.select(
    "distinct(authority_name) as authority_name from applications order by authority_name")
  write_page('authorities', 'authorities', { auths: auths })

  auths.each do |auth|
    # authority_name comes from our own scraped data, so direct string
    # interpolation into the query is tolerated here.
    summary = ScraperWiki.select("
      status, decision, appeal_status, appeal_decision, count(*) as qty
      from applications
      where authority_name = '#{auth['authority_name']}'
      group by status, decision, appeal_status, appeal_decision
    ")
    apps = ScraperWiki.select(
      "* from applications where authority_name='#{auth['authority_name']}' order by date_received")
    write_page(['authorities', slug(auth['authority_name'])], 'authority',
               { apps: apps, auth: auth, summary: summary })
  end
end
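# `slug` is provided by ../lib/helpers, which isn't shown in this file.
# A plausible sketch, assuming it just normalises an authority name into
# a URL-safe path segment:
#
#   def slug(s)
#     s.to_s.downcase.strip.gsub(/[^a-z0-9]+/, '-').gsub(/\A-+|-+\z/, '')
#   end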
@working_dir = File.join(Dir.pwd, OUTPUT_DIR)
puts @working_dir
# exit
@log = Logger.new($stdout)
@pages = 0

create_output_dir
gen_homepage
gen_authorities
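write_page renders each template inside views/layout.haml using the block/yield pattern from the Stack Overflow answer linked in the code. That layout file isn't shown here, but a minimal layout compatible with this script would look something like this (a sketch, not the repo's actual layout):

!!!
%html
  %body
    = yield

Haml::Engine#render passes the block's return value to = yield, so each page body is wrapped in the shared HTML shell.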