luasocket/etc/check-links.lua

-----------------------------------------------------------------------------
-- Little program that checks links in HTML files, using coroutines and
-- non-blocking I/O via the dispatcher module.
-- LuaSocket sample files
-- Author: Diego Nehab
-----------------------------------------------------------------------------
local url = require("socket.url")
local dispatch = require("dispatch")
local http = require("socket.http")
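-- limit how long the dispatcher waits on any single connection (in seconds)
-- before giving up on it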
dispatch.TIMEOUT = 10
-- make sure the user knows how to invoke us
arg = arg or {}
if #arg < 1 then
print("Usage:\n luasocket check-links.lua [-n] {<url>}")
exit()
end
-- '-n' means we are running in non-blocking mode
if arg[1] == "-n" then
    -- if non-blocking I/O was requested, use real dispatcher interface
    table.remove(arg, 1)
    handler = dispatch.newhandler("coroutine")
else
    -- if using blocking I/O, use fake dispatcher interface
    handler = dispatch.newhandler("sequential")
end
local nthreads = 0
-- get the status of a URL using the dispatcher
function getstatus(link)
    local parsed = url.parse(link, {scheme = "file"})
    if parsed.scheme == "http" then
        nthreads = nthreads + 1
        handler:start(function()
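            -- ask only for the headers (HEAD), so the body is never
            -- downloaded; create = handler.tcp makes http.request open its
            -- connection through the dispatcher's sockets, which is what
            -- lets several checks run concurrently under the coroutine
            -- handler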
            local r, c, h, s = http.request{
                method = "HEAD",
                url = link,
                create = handler.tcp
            }
            if r and c == 200 then io.write('\t', link, '\n')
            else io.write('\t', link, ': ', tostring(c), '\n') end
            nthreads = nthreads - 1
        end)
    end
end
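-- read a local file whole, undoing URL escaping in the path first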
function readfile(path)
    path = url.unescape(path)
    local file, error = io.open(path, "r")
    if file then
        local body = file:read("*a")
        file:close()
        return body
    else return nil, error end
end
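-- retrieve a document given its URL: http URLs are requested over the
-- network, file URLs are read from disk; returns the effective base URL
-- (following any redirect), the body, and an error message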
function load(u)
    local parsed = url.parse(u, { scheme = "file" })
    local body, headers, code, error
    local base = u
    if parsed.scheme == "http" then
        body, code, headers = http.request(u)
        if code == 200 then
            -- if there was a redirect, update base to reflect it
            base = headers.location or base
        end
        if not body then
            error = code
        end
    elseif parsed.scheme == "file" then
        body, error = readfile(parsed.path)
    else error = string.format("unhandled scheme '%s'", parsed.scheme) end
    return base, body, error
end
function getlinks(body, base)
    -- get rid of comments
    body = string.gsub(body, "%<%!%-%-.-%-%-%>", "")
    local links = {}
    -- extract links
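    -- three passes: href values in double quotes, in single quotes, and
    -- unquoted (everything up to the closing '>'); each one is turned into
    -- an absolute URL against the page's base before being collected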
    body = string.gsub(body, '[Hh][Rr][Ee][Ff]%s*=%s*"([^"]*)"', function(href)
        table.insert(links, url.absolute(base, href))
    end)
    body = string.gsub(body, "[Hh][Rr][Ee][Ff]%s*=%s*'([^']*)'", function(href)
        table.insert(links, url.absolute(base, href))
    end)
    string.gsub(body, "[Hh][Rr][Ee][Ff]%s*=%s*(.-)>", function(href)
        table.insert(links, url.absolute(base, href))
    end)
    return links
end
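-- load a page, extract its links, and queue a status check for each one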
function checklinks(address)
    local base, body, error = load(address)
    if not body then print(error) return end
    print("Checking ", base)
    local links = getlinks(body, base)
    for _, link in ipairs(links) do
        getstatus(link)
    end
end
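-- each command-line argument is resolved against the "file:" scheme, so
-- bare paths are checked as local files while full http URLs pass through
-- unchanged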
for _, address in ipairs(arg) do
    checklinks(url.absolute("file:", address))
end
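-- under the coroutine handler the checks started above may still be in
-- flight; keep stepping the dispatcher until every thread has finished
-- (with the sequential handler each check already ran to completion, so
-- nthreads is back to zero and this loop exits immediately)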
while nthreads > 0 do
    handler:step()
end