check for duplicate urls

pull/386/head
Dave Machado, 6 years ago
commit 5fbf817c1c
1 changed file with 38 additions and 14 deletions

build/validate_links.rb (+38, -14)

@@ -1,31 +1,55 @@
 #!/usr/bin/env ruby
-require 'faraday'
+require 'httparty'
 require 'uri'
 allowed_codes = [200, 302, 403]
 args = ARGV
 filename = args[0]
+fail_flag = false
 contents = File.open(filename, 'rb') { |f| f.read }
-links = URI.extract(contents, ['http', 'https'])
-dup = links.select{|element| links.count(element) > 1 }
+raw_links = URI.extract(contents, ['http', 'https'])
+# Remove trailing ')' from entry URLs
+links = []
+raw_links.each do |link|
+  if link.end_with?(')')
+    links.push(link[0...-1])
+  else
+    links.push(link)
+  end
+end
+# Fail on any duplicate elements
+dup = links.select{|element| links.count(element) > 1}
 if dup.uniq.length > 0
-  dup.uniq.each do |link|
-    if link.end_with?(')')
-      puts link[0...-1]
-    end
+  dup.uniq.each do |e|
+    puts "Duplicate link: #{e}"
   end
-  exit(1)
+  fail_flag = true
 end
+# Remove any duplicates from array
+links = links.uniq
+count = 0
+total = links.length
+fails = []
 # GET each link and check for valid response code from allowed_codes
 links.each do |link|
-  if link.end_with?(')')
-    link = link[0...-1]
-  end
-  res = Faraday.get(link)
-  if !allowed_codes.include?(res.status)
-    puts "(#{res.status}): #{link}"
+  begin
+    count += 1
+    puts "(#{count}/#{total}) #{link}"
+    res = HTTParty.get(link, timeout: 10)
+    if !allowed_codes.include?(res.code)
+      fails.push("(#{res.code}): #{link}")
+      fail_flag = true
+    else
+      puts "\t(#{res.code})"
+    end
+  rescue
+    puts "FAIL: (#{res.code}) #{link}"
+    fails.push("(#{res.code}): #{link}")
     fail_flag = true
   end
 end
+fails.each do |e|
+  puts e
+end
 if fail_flag
   exit(1)
+else
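
A note on the added cleanup loop: the push-into-both-branches iteration over raw_links can be collapsed into a single map. This is only a sketch of an alternative, not part of the commit; String#chomp with an argument drops the suffix only when it is present:

require 'uri'

# Equivalent cleanup in one pass: chomp(')') strips a trailing ')' when
# present and returns the string unchanged otherwise.
contents = '[Example](https://example.com) and https://example.org'
links = URI.extract(contents, ['http', 'https']).map { |l| l.chomp(')') }
puts links
# https://example.com
# https://example.org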


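Similarly, the duplicate scan calls links.count(element) once per element, which is quadratic in the number of links. A frequency hash finds duplicates in one pass; this sketch assumes Ruby 2.7+ for Enumerable#tally:

# One-pass duplicate detection: tally builds a {link => count} hash.
links = ['https://a.example', 'https://b.example', 'https://a.example']
dup = links.tally.select { |_link, n| n > 1 }.keys
dup.each { |link| puts "Duplicate link: #{link}" }
# Duplicate link: https://a.example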
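
One genuine bug in the new error handling: when HTTParty.get raises (timeout, DNS failure, refused connection), res is never assigned, so res.code inside the rescue block is a call on nil and raises NoMethodError in turn. A minimal sketch of a safer per-link check; check_link is a hypothetical helper, not part of the script:

require 'httparty'

ALLOWED_CODES = [200, 302, 403]

# Report the exception class instead of res.code, since res is nil
# whenever HTTParty.get raises before returning a response.
def check_link(link)
  res = HTTParty.get(link, timeout: 10)
  return nil if ALLOWED_CODES.include?(res.code)
  "(#{res.code}): #{link}"
rescue => e
  "(#{e.class}): #{link}"
end

puts(check_link('https://example.com') || 'OK')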