|
# Scrape UK planning applications matching kiosk-related keyword searches
# across every supported authority and save them into the local
# ScraperWiki SQLite store ('applications' table).
#
# NOTE(review): this file arrived as a corrupted unified diff (a hunk
# header plus both the old single-keyword version and the new
# multi-keyword version interleaved). Reconstructed below is the
# intended post-diff script.

require 'scraperwiki'
# NOTE(review): UKPlanningScraper is referenced below; the require was in
# the unseen diff context lines — confirm gem name matches the Gemfile.
require 'uk_planning_scraper'

auths = UKPlanningScraper::Authority.all

# One entry per keyword search to run against every authority.
# MORPH_DAYS (env var) bounds how many days of validated applications
# to fetch; ENV['MORPH_DAYS'].to_i is 0 if the variable is unset.
scrapes = [
  { validated_days: ENV['MORPH_DAYS'].to_i, keywords: 'inlink' },
  { validated_days: ENV['MORPH_DAYS'].to_i, keywords: 'bt phone kiosk' }
]

auths.each_with_index do |auth, i|
  puts "#{i + 1} of #{auths.size}: #{auth.name}"

  scrapes.each_with_index do |scrape, j|
    puts "Scrape #{j + 1} of #{scrapes.size}: keywords: #{scrape[:keywords]}"

    begin
      apps = auth.scrape(scrape)

      apps.each do |app|
        unless app[:description].match(/chainlink/i) # Backend keyword search is weak
          # Upsert keyed on (authority_name, council_reference) so reruns
          # update rather than duplicate rows.
          ScraperWiki.save_sqlite([:authority_name, :council_reference], app, 'applications')
        end
      end

      puts "#{auth.name}: #{apps.size} application(s) saved."
    rescue StandardError => e
      # Log and continue: one failing authority/search must not abort the run.
      puts "Error: #{e}"
    end
  end
end