Planning applications tracker for InLinkUK from BT kiosks. https://kiosks.adrianshort.org/
You can't select more than 25 topics. Topics must start with a letter or number, can include hyphens ('-'), and can be up to 35 characters long.
 
 
 
 
 

70 lines
2.2 KiB

#!/usr/bin/env ruby
require 'fileutils'
require 'haml'
require 'logger'
require 'pp'
require 'scraperwiki'
require_relative '../lib/helpers'
  7. OUTPUT_DIR = '_site'
  8. VIEWS_DIR = File.join('views')
  9. LAYOUT_FN = File.join(VIEWS_DIR, 'layout.haml')
  10. def write_page(path_items, template, locals = {})
  11. dir = File.join(OUTPUT_DIR, path_items)
  12. FileUtils.mkdir_p(dir)
  13. @log.debug dir
  14. fn = File.join(dir, 'index.html')
  15. # https://stackoverflow.com/questions/6125265/using-layouts-in-haml-files-independently-of-rails
  16. html = Haml::Engine.new(File.read(LAYOUT_FN)).render do
  17. Haml::Engine.new(File.read(File.join(VIEWS_DIR, "#{template}.haml"))).render(Object.new, locals)
  18. end
  19. File.write(fn, html)
  20. @log.info fn
  21. @pages += 1
  22. # TODO - add page to sitemap.xml or sitemap.txt
  23. # https://support.google.com/webmasters/answer/183668?hl=en&ref_topic=4581190
  24. end
  25. def create_output_dir
  26. # Recursively delete working directory to ensure no redundant files are left behind from previous builds.
  27. # FileUtils.rm_rf(@working_dir)
  28. Dir.mkdir(@working_dir) unless File.directory?(@working_dir)
  29. # Dir.chdir(@working_dir)
  30. # Copy `public` dir to output dir
  31. FileUtils.copy_entry('public', @working_dir)
  32. end
  33. def gen_homepage
  34. decisions = ScraperWiki.select("* from `applications` order by date_decision desc limit 20")
  35. write_page('.', 'index', { decisions: decisions })
  36. end
  37. def gen_authorities
  38. auths = ScraperWiki.select("distinct(authority_name) as authority_name from applications order by authority_name")
  39. write_page('authorities', 'authorities', { auths: auths })
  40. auths.each do |auth|
  41. summary = ScraperWiki.select("
  42. status, decision, appeal_status, appeal_decision, count(*) as qty
  43. from applications
  44. where authority_name = '#{auth['authority_name']}'
  45. group by status, decision, appeal_status, appeal_decision
  46. ")
  47. apps = ScraperWiki.select("* from applications where authority_name='#{auth['authority_name']}' order by date_received")
  48. write_page(['authorities', slug(auth['authority_name'])], 'authority', { apps: apps, auth: auth, summary: summary })
  49. end
  50. end
# --- Script entry point ---
# Instance variables set here are read by the methods above.
@working_dir = File.join(Dir.pwd, OUTPUT_DIR)
puts @working_dir
# exit
@log = Logger.new($stdout)
@pages = 0 # incremented by write_page for each page written
create_output_dir
gen_homepage
gen_authorities