@@ -2,6 +2,7 @@ require 'scraperwiki'
 require 'petrify'
 require 'csv'
 require 'json'
+require 'rss'
 
 class Site
   def self.generate
@@ -70,6 +71,21 @@ class Site
     stories = CSV.read('media.csv', :headers => true )
     Petrify.page('media', 'media', { stories: stories, title: "Media" })
 
+    feed = RSS::Maker.make("2.0") do |maker|
+      maker.channel.title = "InLinkUK kiosks media coverage"
+      maker.channel.description = "News and views about Google's UK street kiosk network."
+      maker.channel.link = "https://kiosks.adrianshort.org/media/"
+      maker.channel.updated = Time.now.to_s
+      stories.each do |story|
+        maker.items.new_item do |item|
+          item.link = story['url']
+          item.title = "%s: %s" % [ story['publication'], story['title'] ]
+          item.updated = story['publish_date']
+        end
+      end
+    end
+    Petrify.file('media', 'index.xml', feed)
+
     # Authority pages
     auths = ScraperWiki.select("distinct(authority_name) as authority_name
       from applications")
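For reference, a feed built with RSS::Maker like this can be read back with the same rss library used above. The sketch below is illustrative only, not part of the change: it assumes the generated file ends up at media/index.xml relative to wherever Petrify writes its output (the exact location depends on how Petrify is configured), parses it, and prints each story.

require 'rss'

# Hypothetical path to the generated feed; adjust to wherever Petrify
# writes its output in this project.
feed_path = File.join('media', 'index.xml')

# Parse the XML; the second argument disables strict validation.
feed = RSS::Parser.parse(File.read(feed_path), false)

puts feed.channel.title
feed.items.each do |item|
  # Each item carries the "publication: title" string and the story URL
  # set by the generator above.
  puts "#{item.title} - #{item.link}"
end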