# frozen_string_literal: true

# Morph.io scraper: fetches recently validated planning applications from
# every UK planning authority known to uk_planning_scraper, filters them,
# and stores them in the local SQLite database via ScraperWiki.
require 'uk_planning_scraper'
require 'scraperwiki'

auths = UKPlanningScraper::Authority.all

auths.each_with_index do |auth, i|
  puts "#{i + 1} of #{auths.size}: #{auth.name}"
  begin
    # MORPH_DAYS sets how many validated days back to search; nil.to_i => 0
    # when the environment variable is unset.
    apps = auth.scrape(validated_days: ENV['MORPH_DAYS'].to_i, keywords: 'inlink')

    saved = 0
    apps.each do |app|
      # Backend keyword search is weak, so filter locally: skip any
      # application whose description matches /chainlink/i.
      # Safe navigation guards against a nil description, which previously
      # raised NoMethodError and aborted the rest of this authority's apps.
      # NOTE(review): presumably this excludes chain-link fencing false
      # positives from the 'inlink' query — confirm intent with maintainer.
      next if app[:description]&.match?(/chainlink/i)

      # Upsert keyed on (authority_name, council_reference).
      ScraperWiki.save_sqlite(%i[authority_name council_reference], app, 'applications')
      saved += 1
    end

    # Report the number actually saved; the old code reported apps.size,
    # which included applications the filter skipped.
    puts "#{auth.name}: #{saved} application(s) saved."
  rescue StandardError => e
    # One failing authority must not stop the whole run.
    puts "Error: #{e}"
  end
end