diff --git a/bin/build b/bin/build
index 252b8ae..45ea8fb 100755
--- a/bin/build
+++ b/bin/build
@@ -4,6 +4,7 @@ require 'scraperwiki'
 require 'haml'
 require 'pp'
 require 'logger'
+require 'csv'
 require_relative '../lib/helpers'
 
 OUTPUT_DIR = '_site'
@@ -28,6 +29,15 @@ def write_page(path_items, template, locals = {})
   # https://support.google.com/webmasters/answer/183668?hl=en&ref_topic=4581190
 end
 
+def write_csv(path_items, filename, data)
+  dir = File.join(OUTPUT_DIR, path_items)
+  FileUtils.mkdir_p(dir)
+  @log.debug dir
+  fn = File.join(dir, filename + '.csv')
+  File.write(fn, data)
+  @log.info fn
+end
+
 def create_output_dir
   # Recursively delete working directory to ensure no redundant files are left behind from previous builds.
   # FileUtils.rm_rf(@working_dir)
@@ -56,6 +66,13 @@ def gen_authorities
     ")
     apps = ScraperWiki.select("* from applications where authority_name='#{auth['authority_name']}' order by date_received")
     write_page(['authorities', slug(auth['authority_name'])], 'authority', { apps: apps, auth: auth, summary: summary })
+
+    csv_string = CSV.generate do |csv|
+      csv << apps.first.keys # header row
+      apps.each { |app| csv << app.values }
+    end
+
+    write_csv(['authorities', slug(auth['authority_name'])], slug(auth['authority_name']), csv_string)
   end
 end
 
diff --git a/public/style.css b/public/style.css
index 9661a5b..69a8774 100644
--- a/public/style.css
+++ b/public/style.css
@@ -162,6 +162,13 @@ nav
   padding: 10px 20px;
 }
 
+.button, .button a, .button a:visited {
+  background-color: green;
+  color: white;
+  padding: 5px 10px;
+  margin: 10px 0;
+}
+
 .menu {
   margin: 0;
   padding: 0;
diff --git a/views/authority.haml b/views/authority.haml
index 21d7240..ae06e2f 100644
--- a/views/authority.haml
+++ b/views/authority.haml
@@ -21,6 +21,11 @@
 
 %h2 Applications
 
+%p
+  - csv_fn = slug(auth['authority_name']) + '.csv'
+  %a.button{ :href => csv_fn, :download => csv_fn }
+    Download CSV data
+
 %table
   %thead
     %tr