mirror of https://github.com/lxsang/antd-web-apps synced 2025-07-26 10:39:46 +02:00

WIP: make code compatible with new SILK API

This commit is contained in:
DanyLE
2023-04-26 18:51:03 +02:00
parent 93b6ca18ad
commit a76942f2f3
60 changed files with 1527 additions and 2845 deletions
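The bulk of the change migrates database access from the old expression-style query conditions to the new SILK filter syntax. A minimal before/after sketch of the pattern applied across the controllers and models, taken from the hunks below:

-- old API: operator-keyed expression tables
local data, order = self.blog:fetch({[">"] = {id = id}}, limit, {ctime = "ASC"})
local nodes = self.blog:find({exp = {["="] = {publish = 1}}, fields = {"id", "title"}})
-- new API: flat "field$op" condition keys, "field$dir" order entries and a where/fields filter
local data, order = self.blog:fetch({["id$gt"] = tonumber(id)}, limit, {"ctime$asc"})
local nodes = self.blog:find({where = {publish = 1}, fields = {"id", "title"}})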

View File

@ -1,345 +0,0 @@
local doclassify = {}
local st = require("stmr")
doclassify.bow = function(data, stopwords)
-- first step: build a table of words mapping
-- word: occurrences
local bag = {}
for w in data:gmatch('%w+') do
local word = w:lower()
if not stopwords[word] then
word = st.stmr(word)
if bag[word] then
bag[word].count = bag[word].count + 1
else
bag[word] = {count=0, tf=0, tfidf=0.0}
bag[word].count = 1
end
end
end
-- now calculate the tf of the bag
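-- tf = log(1 + raw count): a log-scaled (sublinear) term frequency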
for k,v in pairs(bag) do
bag[k].tf = math.log(1 + bag[k].count)
end
return bag
end
doclassify.len = function(table)
local cnt = 0
for k,v in pairs(table) do cnt = cnt+1 end
return cnt
end
doclassify.tfidf = function(documents)
-- now for each term in a bag, calculate
-- the inverse document frequency, which
-- is a measure of how much information
-- the word provides, that is, whether the
-- term is common or rare across all documents
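-- idf(term) = log(ndoc / n), so e.g. a term found in 2 of 10 documents
-- gets idf = log(10/2) ≈ 1.61 and tfidf = tf * 1.61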
local ndoc = doclassify.len(documents)
for k,bag in pairs(documents) do
-- for each term in the bag
-- calculate its idf across all documents
for term,b in pairs(bag) do
local n = 0
for id,doc in pairs(documents) do
if doc[term] then n = n+1 end
end
--echo("term:"..term.." appears in"..n.." documents")
b.tfidf = b.tf*math.log(ndoc/n)
end
end
end
doclassify.search = function(term, documents)
local r = {}
local key = term:lower()
for id, doc in pairs(documents) do
if doc[key] then
r[id] = doc[key].tfidf
end
end
return r
end
doclassify.get_vectors = function(documents)
-- get a list of vectors from the documents
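-- returns: vectors (id -> dense vector), the max tfidf value seen,
-- the vocabulary size and the term -> index map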
local index = 0
local vectors = {}
local maps = {}
local terms = {}
local maxv = 0
for id in pairs(documents) do
maps[id] = {}
vectors[id] = {}
end
-- first loop: collect the terms and fill the per-document tfidf maps
for id, doc in pairs(documents) do
for k,v in pairs(doc) do
-- get max value
if v.tfidf > maxv then
maxv = v.tfidf
end
-- get the term
if not terms[k] then
index = index + 1
terms[k] = index
end
for pid in pairs(documents) do
if not maps[pid][k] then
if id == pid then
maps[pid][k] = v.tfidf
else
maps[pid][k] = 0
end
else
if maps[pid][k] == 0 and id == pid then
maps[pid][k] = v.tfidf
end
end
end
end
end
-- reindexing the vectors
for id in pairs(documents) do
for k,v in pairs(maps[id]) do
vectors[id][terms[k]] = v
end
end
--echo("Max tfidf "..maxv.." in document #"..maxid.." of term "..term)
return vectors, maxv, index, terms
end
doclassify.similarity = function(va, vb)
-- using cosine similarity
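-- cos(a, b) = dot(a, b) / (|a| * |b|); 1 means identical direction, 0 means orthogonal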
local dotp = 0
local maga = 0
local magb = 0
for k = 1,#va do
dotp = dotp + va[k]*vb[k]
maga = maga + va[k]*va[k]
magb = magb + vb[k]*vb[k]
end
maga = math.sqrt(maga)
magb = math.sqrt(magb)
local d = 0
if maga ~= 0 and magb ~= 0 then
d = dotp/ (magb*maga)
end
return d
end
doclassify.similarities = function(v1, collection)
local similarities = {}
assert(#v1 == #(collection[1]), "Incorrect vectors size")
for i=1,#collection do
similarities[i] = doclassify.similarity(v1, collection[i])
end
return similarities
end
doclassify.mean_similarity = function(v1, v2)
assert(#v1 == #v2, "Incorrect vectors size")
local similarities = {}
for i = 1,#v1 do similarities[i] = doclassify.similarity(v1[i], v2[i]) end
return doclassify.mean(similarities)
end
doclassify.similarity_chart = function(id, vectors)
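-- similarities of document `id` against every other document, plus a lookup
-- table mapping chart index back to document id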
local vs = {}
local cnt = 0
local lut = {}
for k,v in pairs(vectors) do
if k ~= id then
cnt = cnt + 1
vs[cnt] = v
lut[cnt] = k
end
end
if not vs[1] then return {} end
return doclassify.similarities(vectors[id], vs), lut
end
doclassify.top_similarity = function(id, vectors, n, th)
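-- collect the documents most similar to `id`: at least the top n, and,
-- when a threshold th is given, keep collecting while similarity stays above it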
local chart,lut = doclassify.similarity_chart(id,vectors)
--echo(JSON.encode(chart))
--echo(JSON.encode(lut))
if not lut or #lut <= 0 then return nil end
local top = {}
local j=0
local goon = true
if not th then
goon = false
end
while j < n or goon
do
local i,maxv = doclassify.argmax(chart)
top[lut[i]] = maxv
chart[i] = 0.0
j=j+1
if goon and maxv < th then
goon = false
end
end
--for j=1,n do
-- local i,maxv = doclassify.argmax(chart)
-- top[lut[i]] = maxv
-- chart[i] = 0.0
--end
return top
end
doclassify.save_vectors = function(vectors, name)
local f = io.open(name,"w")
if f == nil then return false end
for id, v in pairs(vectors) do
f:write(id)
for i=1,#v do f:write(","..v[i]) end
f:write("\n")
end
f:close()
return true
end
doclassify.save_topchart = function(vectors, name,n)
local f = io.open(name,"w")
if f == nil then return false end
for k,v in pairs(vectors) do
local top = doclassify.top_similarity(k,vectors,n, 0.1)
for a,b in pairs(top) do
f:write(k.." "..a.." "..b.."\n")
end
end
f:close()
return true
end
doclassify.kmean = function(nclass, documents, maxstep, ids)
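-- nclass: number of clusters, documents: id -> bag-of-words map,
-- maxstep: max iterations, ids: optional document ids used as initial centroids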
-- build tf-idf vectors for all documents
local vectors, maxv, size = doclassify.get_vectors(documents)
-- random centroids
local centroids = {}
local old_centroids = {}
local clusters = {}
--for pid in pairs(documents) do clusters[pid] = 0 end
-- initialise centroids: random vectors, or the vectors of the given document ids
for i = 1,nclass do
if ids == nil then
centroids[i] = doclassify.random(size,math.floor(maxv))
else
centroids[i] = vectors[ids[i]]
end
old_centroids[i] = doclassify.zeros(size)
end
-- loop until convergence or maxstep reached
local similarity = doclassify.mean_similarity(centroids, old_centroids)
local step = maxstep
while 1.0-similarity > 1e-9 and step > 0 do
clusters = {}
--echo(JSON.encode(centroids))
for id,v in pairs(vectors) do
local similarities = doclassify.similarities(v, centroids)
--echo(JSON.encode(similarities))
local cluster, maxvalue = doclassify.argmax(similarities)
--echo("doc #"..id.." is in clusters #"..cluster.." max value is "..maxvalue)
clusters[id] = cluster
end
-- storing the old centroids
old_centroids = centroids
-- calculate new centroids
local new_centroids = {}
for class in pairs(centroids) do
local cnt = 0
local cvectors = {}
for id,v in pairs(vectors) do
if clusters[id] == class then
cnt = cnt + 1
cvectors[cnt] = v
end
end
new_centroids[class] = doclassify.mean_vector(cvectors, size)
end
centroids = new_centroids
--echo(JSON.encode(centroids))
--echo(JSON.encode(old_centroids))
similarity = doclassify.mean_similarity(centroids, old_centroids)
echo("step #"..step..", similarity "..similarity)
step = step - 1
end
local results = {}
for i = 1,nclass do
local list = {}
local cnt = 0
for id,c in pairs(clusters) do
if c == i then
cnt = cnt + 1
list[cnt] = id
end
end
results[i] = list
end
return results, clusters, centroids
end
doclassify.zeros = function(n)
local vector = {}
for i = 1,n do vector[i] = 0.0 end
return vector
end
doclassify.random = function(n,maxv)
local vector = {}
for i=1,n do
vector[i] = math.random() + math.random(0, maxv)
end
return vector
end
doclassify.sum = function(v)
local sum = 0.0
for i=1,#v do sum = sum + v[i] end
return sum
end
doclassify.mean = function(v)
return doclassify.sum(v)/#v
end
doclassify.mean_vector = function(vectors, size)
local means = doclassify.zeros(size)
if not vectors or #vectors == 0 then return means end
--local size = #(vectors[1])
local times = 0
for k,v in pairs(vectors) do
for i=1,#v do means[i] = means[i] + v[i] end
times = times + 1
end
for i = 1,size do means[i] = means[i]/times end
return means
end
doclassify.argmin = function(v)
local minv = math.huge
local mini = 0
for i = 1,#v do
if v[i] <= minv then
mini = i
minv = v[i]
end
end
--echo("min index"..mini.." val "..minv)
return mini, minv
end
doclassify.argmax = function(v)
local maxv = 0.0
local maxi = 0.0
for i = 1,#v do
if v[i] >= maxv then
maxi = i
maxv = v[i]
end
end
return maxi,maxv
end
return doclassify

View File

@ -1,29 +0,0 @@
local gettext = {}
require("sqlite")
gettext.get = function(q)
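-- fetch the id and content of the blog posts matching condition q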
local db = require("os.libs.dbmodel").get("mrsang","blogs",nil)
if not db then return nil end
local exp = {["="] =q}
local cond = {
exp = exp,
fields = {"id", "content"}
}
local data, sort = db:find(cond)
db:close()
if not data or #data == 0 then return nil end
--for k,v in pairs(data) do
-- data[k].content = bytes.__tostring(std.b64decode(data[k].content)):gsub("%%","%%%%")
--end
return data
end
gettext.stopwords = function(ospath)
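-- load a newline-separated stopword file into a set (word -> true)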
--local ospath = require("fs/vfs").ospath(path)
local words = {}
for line in io.lines(ospath) do
words[line] = true
end
return words
end
return gettext

View File

@ -1,151 +0,0 @@
i
me
my
myself
we
our
ours
ourselves
you
your
yours
yourself
yourselves
he
him
his
himself
she
her
hers
herself
it
its
itself
they
them
their
theirs
themselves
what
which
who
whom
this
that
these
those
am
is
are
was
were
be
been
being
have
has
had
having
do
does
did
doing
a
an
the
and
but
if
or
because
as
until
while
of
at
by
for
with
about
against
between
into
through
during
before
after
above
below
to
from
up
down
in
out
on
off
over
under
again
further
then
once
here
there
when
where
why
how
all
any
both
each
few
more
most
other
some
such
no
nor
not
only
own
same
so
than
too
very
s
t
can
will
just
don
should
now
a
b
c
d
e
f
g
h
i
j
k
l
m
n
o
p
q
w
r
s
t
x
y
z

View File

@ -1,50 +0,0 @@
local path = require("fs/vfs").ospath("home://aiws/blog-clustering")
local gettext = loadfile(path.."/gettext.lua")()
local cluster = loadfile(path.."/cluster.lua")()
local refresh = false
local file = "/home/mrsang/test.csv"
if refresh then
local data = gettext.get({publish=1})
local documents = {}
if data then
local sw = gettext.stopwords("home://aiws/blog-clustering/stopwords.txt")
for k,v in pairs(data) do
local bag = cluster.bow(data[k].content, sw)
documents[data[k].id] = bag
end
cluster.tfidf(documents)
--local v = cluster.search("arm", documents)
--echo(JSON.encode(v))
local vectors, maxv, size = cluster.get_vectors(documents)
local s = cluster.save_topchart(vectors,file, 3)
if s then echo("file saved") else echo("error save file") end
--echo(JSON.encode(r))
--r = cluster.similarity(vectors["14"],vectors["16"])
--echo("Similarity "..r)
--local c,l = cluster.kmean(3, documents, 10)
--echo(JSON.encode(c))
--echo(JSON.encode(l))
else
echo("Data missing")
end
else
local f = io.open(file,"r")
local result = {}
for line in f:lines() do
local arr = {}
local cnt = 0
for i in line:gmatch( "%S+") do
cnt = cnt + 1
arr[cnt] = i
end
if not result[arr[1]] then result[arr[1]] = {} end
result[arr[1]][arr[2]] = tonumber(arr[3])
end
f:close()
echo(JSON.encode(result))
--local r = cluster.top_similarity("2",vectors, 3)
--echo(JSON.encode(r))
end

blog/assets/afx.css (new file, 1135 lines): diff suppressed because it is too large

blog/assets/afx.js (new file, 1 line): diff suppressed because one or more lines are too long

View File

@ -260,10 +260,43 @@ button {
white-space: -o-pre-wrap; /* Opera 7 */
word-wrap: break-word; /* Internet Explorer 5.5+ */
}
#container .blogentry a {
.search-result {
color: #24292e;
}
.search-result ul {
list-style: none;
margin: 0;
padding: 0;
}
.search-result ul li{
margin: 0;
}
.search-result ul li b {
color: #878887;
}
.search-result ul li p.title
{
font-size: 16px;
}
.search-result ul li p.preview {
margin: 0;
padding: 0;
padding-left: 20px;
}
#container .blogentry a,
.search-result a {
text-decoration: none;
color: #3170b2;
}
.search-result h2 {
font-size: 18px;
text-align: left;
padding-bottom: 0.3em;
border-bottom: 1px solid #eaecef;
margin-bottom: 16px;
font-weight: 600;
line-height: 1.25;
}
#container .blogentry hr {
display: block;
height: 1px;
@ -330,6 +363,9 @@ button {
display: block;
margin: 0 auto;
}
form.search-form {
display: contents;
}
input.search-box {
flex: 1;
padding: 0;

View File

@ -6,6 +6,34 @@ BaseController:subclass(
}
)
local tools = {}
tools.sum = function(v)
local sum = 0.0
for i=1,#v do sum = sum + v[i] end
return sum
end
tools.mean = function(v)
return tools.sum(v)/#v
end
tools.argmax = function(v)
local maxv = 0.0
local maxi = 0.0
for i = 1,#v do
if v[i] >= maxv then
maxi = i
maxv = v[i]
end
end
return maxi,maxv
end
tools.cmp = function(a,b)
return a[2] > b[2]
end
function PostController:index(...)
return self:top(table.unpack({...}))
end
@ -22,7 +50,7 @@ end
function PostController:afterof(id, limit)
limit = limit or POST_LIMIT
local data, order = self.blog:fetch({[">"] = {id = id}}, limit, {ctime = "ASC"})
local data, order = self.blog:fetch({["id$gt"] = tonumber(id)}, limit, { "ctime$asc"})
if not data or #order == 0 then
return self:notfound("No entry found")
end
@ -36,9 +64,68 @@ function PostController:afterof(id, limit)
return true
end
function PostController:search(...)
local index_file = DB_FILE..".index.json"
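-- the index file is expected to map stemmed token -> { post id -> tfidf weight }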
local st = require("stmr")
local indexes, err_code = JSON.decodeFile(index_file)
local terms = REQUEST.q
if not err_code then
-- prepare the vectors
local docs = {}
local tid = 1
local tokens = {}
local search_vector = {}
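-- stem each query word; for every post whose index entry contains the token,
-- record that token's tfidf for the post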
for word in string.gmatch(terms,'%w+') do
local token = st.stmr(word:lower())
local index = indexes[token]
if index then
for id,v in pairs(index) do
if not docs[id] then
docs[id] = {}
end
docs[id][token] = v
end
tokens[tid] = token
tid = tid + 1
end
end
--echo(JSON.encode(docs))
--echo(JSON.encode(tokens))
-- now create one vector for each document
local mean_tfidf = {}
for id,doc in pairs(docs) do
local vector = {}
for i,token in ipairs(tokens) do
if doc[token] then
vector[i] = doc[token]
else
vector[i] = 0
end
end
local data, order = self.blog:find({
where = {id = tonumber(id)},
fields = {"id", "title", "utime", "ctime", "content"}
})
if data and data[1] then
data[1].content = data[1].content:sub(1,255)
table.insert(mean_tfidf, {id, tools.mean(vector), data[1]})
end
end
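-- rank the matched posts by the mean tfidf of the query tokens, highest first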
table.sort(mean_tfidf, tools.cmp)
self.template:setView("search")
self.template:set("result", mean_tfidf)
self.template:set("title", "Search result")
return true
else
LOG_ERROR("Unable to parse file %s", index_file)
return self:notfound("Internal search error")
end
end
function PostController:beforeof(id, limit)
limit = limit or POST_LIMIT
local data, order = self.blog:fetch({["<"] = {id = id}}, limit)
local data, order = self.blog:fetch({["id$lt"] = tonumber(id)}, limit)
if not data or #order == 0 then
return self:notfound("No entry found")
end
@ -58,15 +145,15 @@ function PostController:list(data, order)
end
function PostController:bytag(b64tag, limit, action, id)
local tag = bytes.__tostring(std.b64decode(b64tag .. "=="))
local cond = {["LIKE"] = {tags = "%%" .. tag .. "%%"}}
local tag = tostring(enc.b64decode(b64tag .. "=="))
local cond = {["tags$like"] = "%%"..tag.."%%"}
local order = nil
limit = limit or POST_LIMIT
if action == "before" then
cond = {["and"] = {cond, {["<"] = {id = id}}}}
cond["id$lt"] = tonumber(id)
elseif action == "after" then
cond = {["and"] = {cond, {[">"] = {id = id}}}}
order = {ctime = "ASC"}
cond["id$gt"] = tonumber(id)
order = {"ctime$asc"}
end
local data, sort = self.blog:fetch(cond, limit, order)
if not data or #sort == 0 then
@ -93,7 +180,7 @@ function PostController:json(id)
error = false,
result = false
}
local data, order = self.blog:fetch({["="] = {id = id}})
local data, order = self.blog:fetch({id = tonumber(id)})
if not data or #order == 0 then
obj.error = "No data found"
else
@ -126,7 +213,7 @@ function PostController:json(id)
end
function PostController:id(pid)
local data, order = self.blog:fetch({["="] = {id = pid}})
local data, order = self.blog:fetch({id = tonumber(pid)})
if not data or #order == 0 then
return self:notfound("No post found")
end
@ -149,7 +236,8 @@ function PostController:id(pid)
self.template:set("similar_posts", similar_posts)
self.template:set("render", true)
self.template:set("tags", data.tags)
self.template:set("url", HTTP_ROOT .. "/post/id/" .. pid)
self.template:set("url", string.format(HTTP_ROOT .. "/post/id/%d",pid))
-- self.template:set("url", string.format("https://blog.lxsang.me/post/id/%d",pid))
self.template:setView("detail")
return true
end
@ -168,7 +256,7 @@ function PostController:actionnotfound(...)
end
function PostController:graph_json(...)
local nodes = self.blog:find({exp= { ["="] = { publish = 1}}, fields = {"id", "title"}})
local nodes = self.blog:find({ where = {publish = 1}, fields = {"id", "title"}})
local output = { error = false, result = false }
local lut = {}
std.json()
@ -195,7 +283,7 @@ function PostController:graph_json(...)
else
key = v.sid..v.pid
end
key = std.sha1(key)
key = enc.sha1(key)
if not lut[key] then
output.result.links[i] = link
i = i + 1
@ -212,40 +300,3 @@ function PostController:graph(...)
self.template:set("d3", true)
return true
end
function PostController:analyse(n)
if not n then
n = 5
end
local path = WWW_ROOT..DIR_SEP.."ai"
local gettext = loadfile(path .. "/gettext.lua")()
local cluster = loadfile(path .. "/cluster.lua")()
local data = gettext.get({publish = 1})
local documents = {}
if data then
local sw = gettext.stopwords(path .. "/stopwords.txt")
for k, v in pairs(data) do
local bag = cluster.bow(data[k].content, sw)
documents[data[k].id] = bag
end
cluster.tfidf(documents)
--local v = cluster.search("arm", documents)
--echo(JSON.encode(v))
local vectors, maxv, size = cluster.get_vectors(documents)
-- purge the table
self.analytical:delete({["="] = {["1"] = 1}})
-- get similarity and put to the table
for id, v in pairs(vectors) do
local top = cluster.top_similarity(id, vectors, tonumber(n), 0.1)
for a, b in pairs(top) do
local record = {pid = id, sid = a, score = b}
self.analytical:create(record)
end
end
self.template:set("message", "Analyse complete")
else
self.template:set("message", "Cannot analyse")
end
self.template:set("title", "TFIDF-analyse")
return true
end

View File

@ -21,7 +21,7 @@ function ServiceController:sendmail()
fail("unknown request")
end
local rq = (JSON.decodeString(REQUEST.json))
local to = "mrsang@lxsang.me"
local to = "mrsang@iohub.dev"
local from = "From: " .. rq.email .. "\n"
local suject = "Subject: " .. rq.subject .. "\n"
local content = "Contact request from:" .. rq.name .. "\n Email: " .. rq.email .. "\n" .. rq.content .. "\n"
@ -45,7 +45,7 @@ function ServiceController:subscribe()
end
local rq = (JSON.decodeString(REQUEST.json))
-- check if email is exist
local data = self.subscribers:find({exp = {["="] = {email = rq.email}}})
local data = self.subscribers:find({where = {email = rq.email}})
if data and #data > 0 then
fail("You are already/previously subscribed")
else

View File

@ -9,5 +9,8 @@ BaseModel:subclass("AnalyticalModel",{
})
function AnalyticalModel:similarof(id)
return self:find({ exp = {["="] = {pid = id}}, order = {score = "DESC"}})
return self:find({
where = {pid = id},
order = { "score$desc"}
})
end

View File

@ -15,28 +15,27 @@ BaseModel:subclass("BlogModel",{
})
function BlogModel:fetch(cnd, limit, order)
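-- fetch published posts (publish = 1) with an optional extra condition, limit and order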
local exp = {}
exp[1] = {["="] = { publish = 1 }}
if cnd then
exp[2] = cnd
else
end
local cond = {
exp = {["and"] = exp },
order = { ctime = "DESC" },
local filter = {
order = { "ctime$desc" },
fields = {
"id", "title", "utime", "ctime", "utimestr", "content", "ctimestr", "rendered", "tags"
}
}
}
if limit then
cond.limit = limit
filter.limit = limit
end
if order then
cond.order = order
filter.order = order
end
return self:find(cond)
filter.where = {}
if cnd then
filter.where = cnd
end
filter.where.publish = 1
return self:find(filter)
end
function BlogModel:minid()

View File

@ -1,26 +1,30 @@
-- the rewrite rule for the framework
-- should be something like this
-- ^\/apps\/+(.*)$ = /apps/router.lua?r=<1>&<query>
-- some global variables
DIR_SEP = "/"
package.path = _SERVER["LIB_DIR"].."/lua/?.lua"
require("silk.api")
-- crypto lib
enc = require("enc")
WWW_ROOT = __ROOT__.."/blog"
-- TODO: change me
DB_FILE = "/home/dany/databases/mrsang.db"
-- add aditional paths
package.path = package.path..";"..WWW_ROOT .. '/?.lua'
DIR_SEP = "/"
if HEADER.Host then
HTTP_ROOT= "https://"..HEADER.Host
else
HTTP_ROOT = "https://blog.lxsang.me"
HTTP_ROOT = "https://blog.iohub.dev"
end
-- class path: path.to.class
BASE_FRW = ""
-- class path: path.to.class
CONTROLLER_ROOT = BASE_FRW.."blog.controllers"
MODEL_ROOT = BASE_FRW.."blog.models"
-- TODO remove me
HTTP_ROOT = HTTP_ROOT.."/next/blog"
CONTROLLER_ROOT = "blog.controllers"
MODEL_ROOT = "blog.models"
-- file path: path/to/file
VIEW_ROOT = WWW_ROOT..DIR_SEP.."views"
LOG_ROOT = WWW_ROOT..DIR_SEP.."logs"
POST_LIMIT = 10
-- require needed library
require(BASE_FRW.."silk.api")
POST_LIMIT = 3
if REQUEST.r then
REQUEST.r = REQUEST.r:gsub("%:", "/")
@ -29,8 +33,8 @@ end
-- registry object store global variables
local REGISTRY = {}
-- set logging level
REGISTRY.logger = Logger:new{ levels = {INFO = false, ERROR = true, DEBUG = false}}
REGISTRY.db = DBHelper:new{db="mrsang"}
REGISTRY.logger = Logger:new{ level = Logger.INFO}
REGISTRY.db = DBModel:new{db=DB_FILE}
REGISTRY.layout = 'default'
REGISTRY.fileaccess = true

View File

@ -1,6 +1,7 @@
<?lua
local chat_uri="https://chat.iohub.dev/comment"
local title = __main__:get("title")
local render = __main__:get("render")
local url = __main__:get("url")
@ -18,17 +19,20 @@
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" type="text/css" href="<?=HTTP_ROOT?>/rst/ubuntu-regular.css" />
<link rel="stylesheet" type="text/css" href="<?=HTTP_ROOT?>/rst/font-awesome.css" />
<link rel="stylesheet" type="text/css" href="<?=HTTP_ROOT?>/rst/afx.css" />
<link rel="stylesheet" type="text/css" href="https://fonts.googleapis.com/css?family=Ubuntu:regular,bold&subset=Latin" />
<link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.css" />
<link rel="stylesheet" type="text/css" href="<?=HTTP_ROOT?>/assets/afx.css" />
<link rel="stylesheet" type="text/css" href="<?=HTTP_ROOT?>/assets/style.css" />
<link rel="stylesheet" type="text/css" href="<?=HTTP_ROOT?>/assets/github-markdown.css" />
<link rel="stylesheet" type="text/css" href="https://chat.iohub.dev/assets/quicktalk.css" />
<script src="https://chat.iohub.dev/assets/quicktalk.js"> </script>
<!--link rel="stylesheet" type="text/css" href="https://app.iohub.dev/next/talk/assets/quicktalk.css" />
<script src="https://app.iohub.dev/next/talk/assets/quicktalk.js"> </script-->
<script src="<?=HTTP_ROOT?>/rst/afx.js"> </script>
<script src="<?=HTTP_ROOT?>/rst/gscripts/jquery-3.4.1.min.js"> </script>
<script src="<?=HTTP_ROOT?>/assets/afx.js"> </script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js"> </script>
<script src="<?=HTTP_ROOT?>/assets/main.js"></script>
<?lua if d3 then ?>
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/6.5.0/d3.min.js" ></script>
@ -41,8 +45,8 @@
<meta property="og:image" content="" />
<?lua if render then ?>
<meta name="twitter:card" content="summary" />
<meta name="twitter:site" content="@blog.lxsang.me" />
<meta name="twitter:creator" content="@lexsang" />
<meta name="twitter:site" content="@blog.iohub.dev" />
<meta name="twitter:creator" content="@DanyLE" />
<meta property="og:url" content="<?=url?>" />
<meta property="og:type" content="article" />
<meta property="og:title" content="<?=title?>" />
@ -54,9 +58,9 @@
<script src="<?=HTTP_ROOT?>/rst/katex/katex.min.js"> </script>
<script src="<?=HTTP_ROOT?>/rst/katex/auto-render.min.js"> </script>
<?lua else ?>
<meta property="og:url" content="https://blog.lxsang.me" />
<meta property="og:url" content="https://blog.iohub.dev" />
<meta property="og:type" content="article" />
<meta property="og:title" content="Xuan Sang LE's blog" />
<meta property="og:title" content="Dany LE's blog" />
<meta property="og:description" content="Blog Home" />
<?lua end ?>
<script>
@ -66,7 +70,7 @@
var options = {
target: "quick_talk_comment_thread",
page: "desktop",
api_uri: "https://chat.iohub.dev/comment",
api_uri: "<?=chat_uri?>",
uri: "<?=url?>",
author: {
first: "mrsang",
@ -122,22 +126,25 @@
<div id = "top">
<div id = "navbar" class = "<?=cls?>">
<div class = "logo"><a href = "https://lxsang.me"></a></div>
<div class = "logo"><a href = "https://iohub.dev"></a></div>
<ul>
<li><i class = "fa fa-home"></i><a href="<?=HTTP_ROOT?>">Home</a></li>
<?lua
if not HEADER.mobile then
?>
<li > <i class = "fa fa-globe"></i><a href = "/post/graph">Explore</a></li>
<li > <i class = "fa fa-globe"></i><a href = "<?=HTTP_ROOT?>/post/graph">Explore</a></li>
<li> <i class = "fa fa-paper-plane"></i><a href="#" onclick="subscribe('<?=HTTP_ROOT?>')">Subscribe</a></li>
<?lua end ?>
<li ><i class = "fa fa-address-card"></i><a href="https://info.lxsang.me" >Portfolio</a></li>
<li ><i class = "fa fa-address-card"></i><a href="https://info.iohub.dev" >Portfolio</a></li>
<li><i class = "fa fa-envelope"></i><a href="#" onclick="mailtoMe('<?=HTTP_ROOT?>')" >Contact</a></li>
</ul>
<?lua
if not HEADER.mobile then
?>
<input type = "text" class = "search-box"></input>
<form class="search-form" method="get" action="<?=HTTP_ROOT?>/post/search">
<input type = "text" class = "search-box" name="q"></input>
<input type="submit" hidden ></input>
</form>
<div class= "search-icon"></div>
<?lua
end

View File

@ -14,9 +14,9 @@
local atags = {}
local i = 1
for tag in data.tags:gmatch(",*([^,]+)") do
tag = std.trim(tag, " ")
tag = ulib.trim(tag, " ")
if tag ~= "" then
local b64tag = std.b64encode(tag)
local b64tag = enc.b64encode(tag)
atags[i] = '<a href = "'..HTTP_ROOT..'/post/bytag/'..b64tag:gsub("=","")..'/'..POST_LIMIT..'">'..tag.."</a>"
i = i+ 1
end

View File

@ -23,16 +23,16 @@
local atags = {}
local i = 1
for tag in data.tags:gmatch(",*([^,]+)") do
tag = std.trim(tag, " ")
tag = ulib.trim(tag, " ")
if tag ~= "" then
local b64tag = std.b64encode(tag)
local b64tag = enc.b64encode(tag)
atags[i] = '<a href = "'..HTTP_ROOT..'/post/bytag/'..b64tag:gsub("=","")..'/'..POST_LIMIT..'">'..tag.."</a>"
i = i+ 1
end
end
echo(table.concat(atags, ", "))
local url = HTTP_ROOT.."/post/id/"..data.id
local old_url = HTTP_ROOT.."/r:id:"..data.id
local url = HTTP_ROOT.."/post/id/"..string.format("%d",data.id)
local old_url = HTTP_ROOT.."/r:id:"..string.format("%d",data.id)
?>
</span>
<!--div class="fb-like" data-href="<?=old_url?>" data-layout="button_count" data-action="like" data-size="small" data-show-faces="true" data-share="true"></div-->
@ -58,7 +58,7 @@
end
if title then
echo(content:sub(0, b))
echo("<a class = 'title_link' href='"..HTTP_ROOT.."/post/id/"..data.id.."'>"..title.."</a>")
echo("<a class = 'title_link' href='"..url.."'>"..title.."</a>")
echo(content:sub(c))
else
echo(content)
@ -67,18 +67,18 @@
</div>
<div class = "detail">
<span></span>
<?='<a href="'..HTTP_ROOT..'/post/id/'..data.id..'" ></a>'?>
<?='<a href="'..url..'" ></a>'?>
<span></span>
</div>
</div>
</div>
<?lua
end
local beforelk = HTTP_ROOT.."/post/beforeof/"..first_id.."/"..POST_LIMIT
local afterlk = HTTP_ROOT.."/post/afterof/"..last_id.."/"..POST_LIMIT
local beforelk = HTTP_ROOT.."/post/beforeof/"..string.format("%d",first_id).."/"..POST_LIMIT
local afterlk = HTTP_ROOT.."/post/afterof/"..string.format("%d",last_id).."/"..POST_LIMIT
if action == "bytag" or action == "search" then
beforelk = HTTP_ROOT.."/post/"..action.."/"..query.."/"..POST_LIMIT.."/before/"..first_id
afterlk = HTTP_ROOT.."/post/"..action.."/"..query.."/"..POST_LIMIT.."/after/"..last_id
beforelk = HTTP_ROOT.."/post/"..action.."/"..query.."/"..POST_LIMIT.."/before/"..string.format("%d",first_id)
afterlk = HTTP_ROOT.."/post/"..action.."/"..query.."/"..POST_LIMIT.."/after/"..string.format("%d",last_id)
end
?>
<div class = "time-travel">

View File

@ -0,0 +1,17 @@
<div class="search-result">
<h2>Posts matched for query: <?=REQUEST.q?></h2>
<ul>
<?lua
for i,v in ipairs(result) do
?>
<li>
<p class="title">
<b>Score <?=string.format("%.3f",v[2])?></b> <a href="<?=HTTP_ROOT?>/post/id/<?=v[3].id?>"><?=v[3].title?></a>
</p>
<p class="preview">
<?=v[3].content?>...
</p>
</li>
<?lua end ?>
</ul>
</div>