Merge branch 'v3'
commit cc7decfd34
@@ -32,7 +32,6 @@ Recursive =
  apply: (recursive, post, args...) ->
    {fullID} = post
    for ID, post of g.posts
    g.posts.forEach (post) ->
      if fullID in post.quotes
        recursive post, args...
    return
@@ -40,22 +40,28 @@ Get =
  allQuotelinksLinkingTo: (post) ->
    # Get quotelinks & backlinks linking to the given post.
    quotelinks = []
    handleQuotes = (post, type) ->
      quotelinks.push post.nodes[type]...
      quotelinks.push clone.nodes[type]... for clone in post.clones
    {posts} = g
    {fullID} = post
    handleQuotes = (qPost, type) ->
      quotelinks.push qPost.nodes[type]...
      quotelinks.push clone.nodes[type]... for clone in qPost.clones
      return
    # First:
    # In every posts,
    # if it did quote this post,
    # get all their backlinks.
    handleQuotes quoterPost, 'quotelinks' for ID, quoterPost of g.posts when post.fullID in quoterPost.quotes
    posts.forEach (qPost) ->
      if fullID in qPost.quotes
        handleQuotes qPost, 'quotelinks'

    # Second:
    # If we have quote backlinks:
    # in all posts this post quoted
    # and their clones,
    # get all of their backlinks.
    if Conf['Quote Backlinks']
      handleQuotes quotedPost, 'backlinks' for quote in post.quotes when quotedPost = g.posts[quote]
      handleQuotes qPost, 'backlinks' for quote in post.quotes when qPost = posts[quote]

    # Third:
    # Filter out irrelevant quotelinks.
    quotelinks.filter (quotelink) ->
@@ -9,10 +9,7 @@ doc = d.documentElement
g =
  VERSION: '<%= version %>'
  NAMESPACE: '<%= meta.name.replace(' ', '_') %>.'
  TYPE: 'sfw'
  boards: {}
  threads: {}
  posts: {}

Mascots =
  'Akiyama_Mio':
@@ -2,6 +2,8 @@ Index =
  init: ->
    return if g.BOARD.ID is 'f' or g.VIEW is 'catalog' or !Conf['JSON Navigation']

    @board = "#{g.BOARD}"

    @button = $.el 'a',
      className: 'index-refresh-shortcut fa'
      title: 'Refresh Index'
@@ -113,12 +115,13 @@ Index =
  scroll: $.debounce 100, ->
    return if Index.req or Conf['Index Mode'] isnt 'infinite' or (doc.scrollTop <= doc.scrollHeight - (300 + window.innerHeight)) or g.VIEW is 'thread'
    Index.pageNum = Index.getCurrentPage() unless Index.pageNum? # Avoid having to pushState to keep track of the current page

    pageNum = Index.pageNum++
    return Index.endNotice() if pageNum >= Index.pagesNum
    nodesPerPage = Index.threadsNumPerPage * 2
    nodes = Index.sortedNodes[nodesPerPage * pageNum ... nodesPerPage * (pageNum + 1)]
    Index.buildReplies nodes if Conf['Show Replies']
    $.add Index.root, nodes

    nodes = Index.buildSinglePage pageNum
    Index.buildReplies nodes if Conf['Show Replies']
    Index.buildStructure nodes
    Index.setPage pageNum

  endNotice: do ->
@@ -240,7 +243,7 @@ Index =
      onabort: onload
      onloadend: onload
    ,
      whenModified: true
      whenModified: Index.board is "#{g.BOARD}"
    $.addClass Index.button, 'fa-spin'

  load: (e, pageNum) ->
@@ -266,13 +269,16 @@ Index =
        new Notice 'warning', err, 1
      return

    Navigate.title()
    Index.board = "#{g.BOARD}"

    try
      if req.status is 200
        Index.parse JSON.parse(req.response), pageNum
      else if req.status is 304 and pageNum?
        Index.pageNav pageNum
    catch err
      c.error 'Index failure:', err
      c.error "Index failure: #{err.message}", err.stack
      # network error or non-JSON content for example.
      if notice
        notice.setType 'error'
@@ -303,8 +309,8 @@ Index =
    Index.threadsNumPerPage = pages[0].threads.length
    Index.liveThreadData = pages.reduce ((arr, next) -> arr.concat next.threads), []
    Index.liveThreadIDs = Index.liveThreadData.map (data) -> data.no
    for threadID, thread of g.BOARD.threads when thread.ID not in Index.liveThreadIDs
      thread.collect()
    g.BOARD.threads.forEach (thread) ->
      thread.collect() unless thread.ID in Index.liveThreadIDs
    return

  buildThreads: ->
@@ -312,23 +318,23 @@ Index =
    threads = []
    posts = []
    for threadData, i in Index.liveThreadData
      threadRoot = Build.thread g.BOARD, threadData
      Index.nodes.push threadRoot, $.el 'hr'
      if thread = g.BOARD.threads[threadData.no]
        thread.setPage Math.floor i / Index.threadsNumPerPage
        thread.setStatus 'Sticky', !!threadData.sticky
        thread.setStatus 'Closed', !!threadData.closed
      else
        thread = new Thread threadData.no, g.BOARD
        threads.push thread
      continue if thread.ID of thread.posts
      try
        threadRoot = Build.thread g.BOARD, threadData
        if thread = g.BOARD.threads[threadData.no]
          thread.setPage Math.floor i / Index.threadsNumPerPage
          thread.setStatus 'Sticky', !!threadData.sticky
          thread.setStatus 'Closed', !!threadData.closed
        else
          thread = new Thread threadData.no, g.BOARD
          threads.push thread
        Index.nodes.push threadRoot
        continue if thread.ID of thread.posts
        posts.push new Post $('.opContainer', threadRoot), thread, g.BOARD
      catch err
        # Skip posts that we failed to parse.
        errors = [] unless errors
        errors.push
          message: "Parsing of Post No.#{thread} failed. Post will be skipped."
          message: "Parsing of Thread No.#{thread} failed. Thread will be skipped."
          error: err
    Main.handleErrors errors if errors
@@ -340,7 +346,7 @@ Index =

  buildReplies: (threadRoots) ->
    posts = []
    for threadRoot in threadRoots by 2
    for threadRoot in threadRoots
      thread = Get.threadFromRoot threadRoot
      i = Index.liveThreadIDs.indexOf thread.ID
      continue unless lastReplies = Index.liveThreadData[i].last_replies
@@ -364,52 +370,89 @@ Index =
    Main.callbackNodes Post, posts

  sort: ->
    switch Conf['Index Sort']
      when 'bump'
        sortedThreadIDs = Index.liveThreadIDs
      when 'lastreply'
        sortedThreadIDs = [Index.liveThreadData...].sort((a, b) ->
          a = a.last_replies[a.last_replies.length - 1] if 'last_replies' of a
          b = b.last_replies[b.last_replies.length - 1] if 'last_replies' of b
    {liveThreadIDs, liveThreadData} = Index
    sortedThreadIDs = {
      lastreply:
        [liveThreadData...].sort((a, b) ->
          a = num[num.length - 1] if (num = a.last_replies)
          b = num[num.length - 1] if (num = b.last_replies)
          b.no - a.no
        ).map (data) -> data.no
      when 'birth'
        sortedThreadIDs = [Index.liveThreadIDs...].sort (a, b) -> b - a
      when 'replycount'
        sortedThreadIDs = [Index.liveThreadData...].sort((a, b) -> b.replies - a.replies).map (data) -> data.no
      when 'filecount'
        sortedThreadIDs = [Index.liveThreadData...].sort((a, b) -> b.images - a.images).map (data) -> data.no
    Index.sortedNodes = []
        ).map (post) -> post.no
      bump: liveThreadIDs
      birth: [liveThreadIDs... ].sort (a, b) -> b - a
      replycount: [liveThreadData...].sort((a, b) -> b.replies - a.replies).map (post) -> post.no
      filecount: [liveThreadData...].sort((a, b) -> b.images - a.images ).map (post) -> post.no
    }[Conf['Index Sort']]
    Index.sortedNodes = sortedNodes = new RandomAccessList
    {nodes} = Index
    for threadID in sortedThreadIDs
      i = Index.liveThreadIDs.indexOf(threadID) * 2
      Index.sortedNodes.push Index.nodes[i], Index.nodes[i + 1]
    if Index.isSearching
      Index.sortedNodes = Index.querySearch(Index.searchInput.value) or Index.sortedNodes
    # Sticky threads
    Index.sortOnTop (thread) -> thread.isSticky
    # Highlighted threads
    Index.sortOnTop((thread) -> thread.isOnTop) if Conf['Filter']
    # Non-hidden threads
    Index.sortOnTop((thread) -> !thread.isHidden) if Conf['Anchor Hidden Threads']
      sortedNodes.push nodes[Index.liveThreadIDs.indexOf(threadID)]
    if Index.isSearching and nodes = Index.querySearch(Index.searchInput.value)
      Index.sortedNodes = new RandomAccessList nodes
    items = [
      # Sticky threads
      fn: (thread) -> thread.isSticky
      cnd: true
    , # Highlighted threads
      fn: (thread) -> thread.isOnTop
      cnd: Conf['Filter']
    , # Non-hidden threads
      fn: (thread) -> !thread.isHidden
      cnd: Conf['Anchor Hidden Threads']
    ]
    i = 0
    while item = items[i++]
      {fn, cnd} = item
      Index.sortOnTop fn if cnd
    return

  sortOnTop: (match) ->
    offset = 0
    for threadRoot, i in Index.sortedNodes by 2 when match Get.threadFromRoot threadRoot
      Index.sortedNodes.splice offset++ * 2, 0, Index.sortedNodes.splice(i, 2)...
    {sortedNodes} = Index
    threadRoot = sortedNodes.first
    while threadRoot
      if match Get.threadFromRoot threadRoot.data
        target = sortedNodes.first
        j = 0
        while j++ < offset
          target = target.next
        unless threadRoot is target
          offset++
          sortedNodes.before target, threadRoot
      threadRoot = threadRoot.next
    return

  buildIndex: ->
    if Conf['Index Mode'] isnt 'all pages'
      pageNum = Index.getCurrentPage()
      nodesPerPage = Index.threadsNumPerPage * 2
      nodes = Index.sortedNodes[nodesPerPage * pageNum ... nodesPerPage * (pageNum + 1)]
      nodes = Index.buildSinglePage Index.getCurrentPage()
    else
      nodes = Index.sortedNodes
      nodes = [(target = Index.sortedNodes.first).data]
      while target = target.next
        nodes.push target.data
    $.rmAll Index.root
    $.rmAll Header.hover
    Index.buildReplies nodes if Conf['Show Replies']
    $.add Index.root, nodes
    $.event 'IndexBuild', nodes
    Index.buildStructure nodes

  buildSinglePage: (pageNum) ->
    nodes = []
    nodesPerPage = Index.threadsNumPerPage
    offset = nodesPerPage * pageNum
    end = offset + nodesPerPage
    target = Index.sortedNodes.order()[offset]
    Index.sortedNodes
    while (offset++ <= end) and target
      nodes.push target.data
      target = target.next
    nodes

  buildStructure: (nodes) ->
    result = $.frag()
    i = 0
    $.add result, [node, $.el 'hr'] while node = nodes[i++]
    $.add Index.root, result
    $.rm hr for hr in $$ 'hr + hr', Index.root # Temp fix until I figure out where I fucked up
    $.event 'IndexBuild', result

  isSearching: false
@@ -449,11 +492,14 @@ Index =
    return unless keywords = query.toLowerCase().match /\S+/g
    Index.search keywords

  search: (keywords) ->
    found = []
    for threadRoot, i in Index.sortedNodes by 2
      if Index.searchMatch Get.threadFromRoot(threadRoot), keywords
        found.push Index.sortedNodes[i], Index.sortedNodes[i + 1]
  search: (keywords) ->
    found = []
    target = Index.sortedNodes.first
    while target
      {data} = target
      if Index.searchMatch Get.threadFromRoot(data), keywords
        found.push data
      target = target.next
    found

  searchMatch: (thread, keywords) ->
@@ -1,5 +1,8 @@
Main =
  init: ->
    g.threads = new SimpleDict
    g.posts = new SimpleDict

    pathname = location.pathname.split '/'
    g.BOARD = new Board pathname[1]
    return if g.BOARD.ID in ['z', 'fk']
@@ -122,7 +125,7 @@ Main =
        'left=0,top=0,width=500,height=255,toolbar=0,resizable=0'
      $.before styleSelector.previousSibling, [$.tn('['), passLink, $.tn(']\u00A0\u00A0')]

    if g.VIEW is 'thread' or !Conf['JSON Navigation']
    unless Conf['JSON Navigation'] and g.VIEW is 'index'
      Main.initThread()
    else
      $.event '4chanXInitFinished'
@@ -1,11 +1,13 @@
Navigate =
  path: window.location.pathname
  path: window.location.pathname
  init: ->
    return if g.VIEW is 'catalog' or g.BOARD.ID is 'f' or !Conf['JSON Navigation']

    # blink/webkit throw a popstate on page load. Not what we want.
    $.ready -> $.on window, 'popstate', Navigate.popstate

    @title = -> return

    Thread.callbacks.push
      name: 'Navigate'
      cb: @thread
@@ -34,10 +36,7 @@ Navigate =
    {posts, threads} = g

    # Garbage collection
    g.posts = {}
    g.threads = {}
    g.BOARD.posts = {}
    g.BOARD.threads = {}
    g.threads.forEach (thread) -> thread.collect()

    QuoteBacklink.containers = {}
@@ -84,7 +83,7 @@ Navigate =
      feature() if condition
    catch err
      error = [
        message: "Quote Threading Failed."
        message: "#{name} Failed."
        error: err
      ]
    Main.handleErrors error if error
@@ -106,10 +105,13 @@ Navigate =
    $.off d, 'IndexRefresh', QR.generatePostableThreadsList

  updateBoard: (boardID) ->
    g.BOARD = new Board boardID

    req = null

    fullBoardList = $ '#full-board-list', Header.boardList
    $.rmClass $('.current', fullBoardList), 'current'
    $.addClass $("a[href*='/#{boardID}/']", fullBoardList), 'current'
    Header.generateBoardList Conf['boardnav'].replace /(\r\n|\n|\r)/g, ' '

    onload = (e) ->
      if e.type is 'abort'
        req.onloadend = null
|
||||
|
||||
return unless req.status is 200
|
||||
|
||||
board = do -> try
|
||||
for board in JSON.parse(req.response).boards
|
||||
return board if board.board is boardID
|
||||
try
|
||||
for aboard in JSON.parse(req.response).boards when aboard.board is boardID
|
||||
board = aboard
|
||||
break
|
||||
|
||||
catch err
|
||||
Main.handleErrors [
|
||||
@ -130,33 +133,36 @@ Navigate =
|
||||
|
||||
return unless board
|
||||
Navigate.updateTitle board
|
||||
|
||||
return if Favicon.SFW is sfw = !!board.ws_board # Board SFW status hasn't changed
|
||||
|
||||
g.TYPE = if sfw then 'sfw' else 'nsfw'
|
||||
if Conf["NSFW/SFW Mascots"]
|
||||
Main.setMascotString()
|
||||
MascotTools.toggle()
|
||||
|
||||
if Conf["NSFW/SFW Themes"]
|
||||
Main.setThemeString()
|
||||
theme = Themes[Conf[g.THEMESTRING] or if sfw then 'Yotsuba B' else 'Yotsuba'] or Themes[Conf[g.THEMESTRING] = if sfw then 'Yotsuba B' else 'Yotsuba']
|
||||
Style.setTheme theme
|
||||
|
||||
Favicon.SFW = sfw
|
||||
Favicon.el.href = "//s.4cdn.org/image/favicon#{if sfw then '-ws' else ''}.ico"
|
||||
$.add d.head, Favicon.el # Changing the href alone doesn't update the icon on Firefox
|
||||
Favicon.init()
|
||||
|
||||
fullBoardList = $ '#full-board-list', Header.boardList
|
||||
$.rmClass $('.current', fullBoardList), 'current'
|
||||
$.addClass $("a[href*='/#{boardID}/']", fullBoardList), 'current'
|
||||
Header.generateBoardList Conf['boardnav'].replace /(\r\n|\n|\r)/g, ' '
|
||||
Navigate.updateFavicon !!board.ws_board
|
||||
|
||||
req = $.ajax '//a.4cdn.org/boards.json',
|
||||
onabort: onload
|
||||
onloadend: onload
|
||||
|
||||
updateFavicon: (sfw) ->
|
||||
# TODO: think of a better name for this. Changes style, too.
|
||||
Favicon.el.href = "//s.4cdn.org/image/favicon#{if sfw then '-ws' else ''}.ico"
|
||||
$.add d.head, Favicon.el # Changing the href alone doesn't update the icon on Firefox
|
||||
|
||||
return if Favicon.SFW is sfw # Board SFW status hasn't changed
|
||||
|
||||
Favicon.SFW = sfw
|
||||
Favicon.update()
|
||||
|
||||
g.TYPE = if sfw then 'sfw' else 'nsfw'
|
||||
if Conf["NSFW/SFW Mascots"]
|
||||
Main.setMascotString()
|
||||
MascotTools.toggle()
|
||||
|
||||
if Conf["NSFW/SFW Themes"]
|
||||
Main.setThemeString()
|
||||
theme = Themes[Conf[g.THEMESTRING] or if sfw then 'Yotsuba B' else 'Yotsuba'] or Themes[Conf[g.THEMESTRING] = if sfw then 'Yotsuba B' else 'Yotsuba']
|
||||
Style.setTheme theme
|
||||
|
||||
mainStyleSheet.href = newStyleSheet.href
|
||||
|
||||
Main.setClass()
|
||||
|
||||
updateTitle: ({board, title}) ->
|
||||
$.rm subtitle if subtitle = $ '.boardSubtitle'
|
||||
$('.boardTitle').textContent = d.title = "/#{board}/ - #{title}"
|
||||
@@ -173,6 +179,7 @@ Navigate =

    return if view is 'catalog' or 'f' in [boardID, g.BOARD.ID]
    e.preventDefault() if e
    Navigate.title = -> return

    delete Index.pageNum
@@ -196,18 +203,20 @@ Navigate =

    if view is 'index'
      if boardID is g.BOARD.ID
        d.title = $('.boardTitle').textContent
        Navigate.title = -> d.title = $('.boardTitle').textContent
      else
        Navigate.updateBoard boardID
        g.BOARD = new Board boardID
        Navigate.title = -> Navigate.updateBoard boardID

      Index.update pageNum

    # Moving from index to thread or thread to thread
    else
      onload = (e) -> Navigate.load e
      Navigate.updateFavicon Favicon.SFW
      {load} = Navigate
      Navigate.req = $.ajax "//a.4cdn.org/#{boardID}/res/#{threadID}.json",
        onabort: onload
        onloadend: onload
        onabort: load
        onloadend: load

      setTimeout (->
        if Navigate.req and !Navigate.notice
@@ -221,13 +230,13 @@ Navigate =
    delete Navigate.req
    delete Navigate.notice

    if e.type is 'abort'
    if e.type is 'abort' or req.status isnt 200
      req.onloadend = null
      new Notice 'warning', "Failed to load thread.#{if req.status then " #{req.status}" else ''}"
      return

    try
      if req.status is 200
        Navigate.parse JSON.parse(req.response).posts
      Navigate.parse JSON.parse(req.response).posts
    catch err
      console.error 'Navigate failure:'
      console.log err
@@ -270,7 +279,8 @@ Navigate =
    Main.callbackNodes Thread, [thread]
    Main.callbackNodes Post, posts

    Navigate.ready 'Quote Threading', QuoteThreading.force, Conf['Quote Threading']
    Navigate.ready 'Quote Threading', QuoteThreading.force, Conf['Quote Threading'] and not Conf['Unread Count']
    Navigate.ready 'Unread Count', Unread.ready, Conf['Unread Count']

    Navigate.buildThread()
    Header.hashScroll.call window
@@ -281,7 +291,6 @@ Navigate =
    $.add board, [Navigate.threadRoot, $.el 'hr']

    if Conf['Unread Count']
      Navigate.ready 'Unread Count', Unread.ready, not Conf['Quote Threading']
      Unread.read()
      Unread.update()
@@ -2,7 +2,7 @@ class Board
  toString: -> @ID

  constructor: (@ID) ->
    @threads = {}
    @posts = {}
    @threads = new SimpleDict
    @posts = new SimpleDict

    g.boards[@] = @
@@ -5,4 +5,5 @@
<%= grunt.file.read('src/General/lib/clone.class') %>
<%= grunt.file.read('src/General/lib/databoard.class') %>
<%= grunt.file.read('src/General/lib/notice.class') %>
<%= grunt.file.read('src/General/lib/randomaccesslist.class') %>
<%= grunt.file.read('src/General/lib/randomaccesslist.class') %>
<%= grunt.file.read('src/General/lib/simpledict.class') %>
@@ -54,7 +54,7 @@ class Post
      @parseFile that

    @clones = []
    g.posts[@fullID] = thread.posts[@] = board.posts[@] = @
    g.posts.push @fullID, thread.posts.push @, board.posts.push @, @
    @kill() if that.isArchived

  parseComment: ->
@@ -208,9 +208,9 @@ class Post

  collect: ->
    @kill()
    delete g.posts[@fullID]
    delete @thread.posts[@]
    delete @board.posts[@]
    g.posts.rm @fullID
    @thread.posts.rm @
    @board.posts.rm @

  addClone: (context) ->
    new Clone @, context
@@ -1,19 +1,36 @@
class RandomAccessList
  constructor: ->
  constructor: (items) ->
    @length = 0
    @push item for item in items if items

  push: (item) ->
    {ID} = item
  push: (data) ->
    {ID} = data
    ID or= data.id
    return if @[ID]
    {last} = @
    @[ID] = item =
      prev: last
      next: null
      data: data
      ID: ID
    item.prev = last
    @[ID] = item
    @last = if last
      last.next = item
    else
      @first = item
    @length++

  before: (root, item) ->
    return if item.next is root

    @rmi item

    {prev} = root
    root.prev = item
    item.next = root
    item.prev = prev
    prev.next = item if prev

  after: (root, item) ->
    return if item.prev is root

@@ -24,7 +41,7 @@ class RandomAccessList
    item.prev = root
    item.next = next
    next.prev = item if next


  prepend: (item) ->
    {first} = @
    return if item is first or not @[item.ID]
@@ -36,6 +53,11 @@ class RandomAccessList

  shift: ->
    @rm @first.ID

  order: ->
    order = [item = @first]
    order.push item while item = item.next
    order

  rm: (ID) ->
    item = @[ID]
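After this rewrite, Index.sortedNodes, Unread.posts and QuoteThreading's ordering structure hold RandomAccessList entries instead of array slots, so consumers read .data and walk .first/.next rather than indexing. A minimal traversal sketch, assuming the new constructor and push shown above; the item objects here are made up (anything carrying an id or ID property works):

# Hypothetical data; each stored entry exposes {ID, data, prev, next}.
list = new RandomAccessList [{id: 101, name: 'op'}, {id: 102, name: 'reply'}]
node = list.first
while node
  console.log node.ID, node.data.name # 101 'op', then 102 'reply'
  node = node.next
console.log list.order().length       # 2 -- order() flattens the links into an array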
src/General/lib/simpledict.class (new file, 16 lines)
@@ -0,0 +1,16 @@
class SimpleDict
  constructor: ->
    @keys = []

  push: (key, data) ->
    key = "#{key}"
    @keys.push key unless @[key]
    @[key] = data

  rm: (key) ->
    key = "#{key}"
    if (i = @keys.indexOf key) isnt -1
      @keys.splice i, 1
      delete @[key]

  forEach: (fn) -> fn @[key] for key in [@keys...]
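SimpleDict is what the rest of this commit swaps in for the plain objects behind g.posts, g.threads and the per-board/per-thread collections: 'for key, value of dict' loops become dict.forEach calls, keyed assignment becomes push, and delete becomes rm. A minimal usage sketch, assuming the class above; the IDs and fields are made up:

posts = new SimpleDict()
posts.push 'a.123', subject: 'first'
posts.push 'a.124', subject: 'second'
posts.forEach (post) -> console.log post.subject # 'first', then 'second'
posts.rm 'a.123'
console.log posts.keys                           # ['a.124']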
@@ -4,13 +4,13 @@ class Thread

  constructor: (@ID, @board) ->
    @fullID = "#{@board}.#{@ID}"
    @posts = {}
    @posts = new SimpleDict
    @isSticky = false
    @isClosed = false
    @postLimit = false
    @fileLimit = false

    g.threads[@fullID] = board.threads[@] = @
    g.threads.push @fullID, board.threads.push @, @

  setPage: (pageNum) ->
    icon = $ '.page-num', @OP.nodes.post
@@ -44,7 +44,6 @@ class Thread
    @timeOfDeath = Date.now()

  collect: ->
    for postID, post in @posts
      post.collect()
    delete g.threads[@fullID]
    delete @board.threads[@]
    @posts.forEach (post) -> post.collect()
    g.threads.rm @fullID
    @board.threads.rm @
@@ -35,6 +35,14 @@ ImageExpand =
    ImageExpand.toggle Get.postFromNode @

  toggleAll: ->
    $.event 'CloseMenu'
    func = (post) ->
      {file} = post
      return unless file and file.isImage and doc.contains post.nodes.root
      if ImageExpand.on and
          (!Conf['Expand spoilers'] and file.isSpoiler or
          Conf['Expand from here'] and Header.getTopOf(file.thumb) < 0)
        return
      $.queueTask func, post
    if ImageExpand.on = $.hasClass ImageExpand.EAI, 'expand-all-shortcut'
      ImageExpand.EAI.className = 'contract-all-shortcut a-icon'
      ImageExpand.EAI.title = 'Contract All Images'
@@ -43,16 +51,10 @@ ImageExpand =
      ImageExpand.EAI.className = 'expand-all-shortcut a-icon'
      ImageExpand.EAI.title = 'Expand All Images'
      func = ImageExpand.contract
    for ID, post of g.posts
      for post in [post].concat post.clones
        {file} = post
        continue unless file and file.isImage and doc.contains post.nodes.root
        if ImageExpand.on and
            (!Conf['Expand spoilers'] and file.isSpoiler or
            Conf['Expand from here'] and Header.getTopOf(file.thumb) < 0)
          continue
        $.queueTask func, post
    return
    g.posts.forEach (post) ->
      func post
      func post for post in post.clones
    return

  setFitness: ->
    (if @checked then $.addClass else $.rmClass) doc, @name.toLowerCase().replace /\s+/g, '-'
@@ -7,6 +7,10 @@ ImageLoader =
      name: 'Image Replace'
      cb: @node

    Thread.callbacks.push
      name: 'Image Replace'
      cb: @thread

    return unless Conf['Image Prefetching'] and g.VIEW is 'thread'

    prefetch = $.el 'label',
@@ -19,6 +23,9 @@ ImageLoader =
      type: 'header'
      el: prefetch
      order: 104

  thread: ->
    ImageLoader.thread = @

  node: ->
    return if @isClone or @isHidden or @thread.isHidden or !@file?.isImage
@@ -38,5 +45,5 @@ ImageLoader =
  toggle: ->
    enabled = Conf['prefetch'] = @checked
    if enabled
      ImageLoader.node.call post for id, post of g.threads["#{g.BOARD.ID}.#{g.THREADID}"].posts
      ImageLoader.thread.posts.forEach ImageLoader.node.call
    return
@@ -19,9 +19,8 @@ ExpandThread =

  onIndexRefresh: ->
    ExpandThread.disconnect true
    for threadID, thread of g.BOARD.threads
    g.BOARD.threads.forEach (thread) ->
      ExpandThread.setButton thread
    return

  text: (status, posts, files) ->
    "#{status} #{posts} post#{if posts > 1 then 's' else ''}" +
@@ -1,54 +1,63 @@
Favicon =
  init: ->
    t = 'data:image/png;base64,'
    items = {
      ferongr: [
        '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadDead.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadDeadY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadSFWY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadNSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      'xat-': [
        '<%= grunt.file.read("src/General/img/favicons/xat-/unreadDead.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/xat-/unreadDeadY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/xat-/unreadSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/xat-/unreadSFWY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/xat-/unreadNSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/xat-/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      Mayhem: [
        '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadDead.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadDeadY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadSFWY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadNSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      '4chanJS': [
        '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadDead.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadDeadY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadSFWY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadNSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      Original: [
        '<%= grunt.file.read("src/General/img/favicons/Original/unreadDead.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Original/unreadDeadY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Original/unreadSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Original/unreadSFWY.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Original/unreadNSFW.png", {encoding: "base64"}) %>'
        '<%= grunt.file.read("src/General/img/favicons/Original/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
    }[Conf['favicon']]

    f = Favicon
    [f.unreadDead, funreadDeadY, f.unreadSFW, f.unreadSFWY, f.unreadNSFW, f.unreadNSFWY] = switch Conf['favicon']
      when 'ferongr' then [
        t + '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadDead.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadDeadY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadSFWY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadNSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/ferongr/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      when 'xat-' then [
        t + '<%= grunt.file.read("src/General/img/favicons/xat-/unreadDead.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/xat-/unreadDeadY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/xat-/unreadSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/xat-/unreadSFWY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/xat-/unreadNSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/xat-/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      when 'Mayhem' then [
        t + '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadDead.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadDeadY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadSFWY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadNSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Mayhem/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      when '4chanJS' then [
        t + '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadDead.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadDeadY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadSFWY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadNSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/4chanJS/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
      when 'Original' then [
        t + '<%= grunt.file.read("src/General/img/favicons/Original/unreadDead.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Original/unreadDeadY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Original/unreadSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Original/unreadSFWY.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Original/unreadNSFW.png", {encoding: "base64"}) %>'
        t + '<%= grunt.file.read("src/General/img/favicons/Original/unreadNSFWY.png", {encoding: "base64"}) %>'
      ]
    if Favicon.SFW
      Favicon.unread = Favicon.unreadSFW
      Favicon.unreadY = Favicon.unreadSFWY
    t = 'data:image/png;base64,'
    i = 0
    while items[i]
      items[i] = t + items[i++]
    [f.unreadDead, funreadDeadY, f.unreadSFW, f.unreadSFWY, f.unreadNSFW, f.unreadNSFWY] = items
    f.update()

  update: ->
    if @SFW
      @unread = @unreadSFW
      @unreadY = @unreadSFWY
    else
      Favicon.unread = Favicon.unreadNSFW
      Favicon.unreadY = Favicon.unreadNSFWY
      @unread = @unreadNSFW
      @unreadY = @unreadNSFWY

  dead: 'data:image/gif;base64,<%= grunt.file.read("src/General/img/favicons/dead.gif", {encoding: "base64"}) %>'
  logo: 'data:image/png;base64,<%= grunt.file.read("src/General/img/icon128.png", {encoding: "base64"}) %>'
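The rewritten init picks a single icon set out of an object literal keyed by Conf['favicon'], then prefixes every base64 payload with the shared data-URI header once, where the old switch repeated the 't +' prefix on each line. A compacted sketch of that lookup-then-prefix pattern; the set names and payloads here are made up:

t = 'data:image/png;base64,'
items = {
  setA: ['AAAA', 'BBBB']
  setB: ['CCCC', 'DDDD']
}['setA']
i = 0
while items[i]
  items[i] = t + items[i++]
console.log items # ['data:image/png;base64,AAAA', 'data:image/png;base64,BBBB']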
@@ -26,7 +26,7 @@ ThreadStats =
  node: ->
    postCount = 0
    fileCount = 0
    for ID, post of @posts
    @posts.forEach (post) ->
      postCount++
      fileCount++ if post.file
    ThreadStats.thread = @
@@ -291,11 +291,11 @@ ThreadUpdater =
    deletedFiles = []

    # Check for deleted posts/files.
    for ID, post of ThreadUpdater.thread.posts
    ThreadUpdater.thread.posts.forEach (post) ->
      # XXX tmp fix for 4chan's racing condition
      # giving us false-positive dead posts.
      # continue if post.isDead
      ID = +ID
      ID = +post.ID

      unless ID in index
        post.kill()
@@ -306,6 +306,7 @@ ThreadUpdater =
        post.kill true
        deletedFiles.push post

      # Fetching your own posts after posting
      if ThreadUpdater.postID and ThreadUpdater.postID is ID
        ThreadUpdater.foundPost = true

@@ -326,8 +327,7 @@ ThreadUpdater =
    scroll = Conf['Auto Scroll'] and ThreadUpdater.scrollBG() and
      ThreadUpdater.root.getBoundingClientRect().bottom - doc.clientHeight < 25

    for key, post of posts
      continue unless posts.hasOwnProperty key
    for post in posts
      root = post.nodes.root
      if post.cb
        unless post.cb()
@@ -174,7 +174,9 @@ ThreadWatcher =
    $.rmAll list
    $.add list, nodes

    for threadID, thread of g.BOARD.threads
    {threads} = g.BOARD
    for threadID in threads.keys
      thread = threads[threadID]
      toggler = $ '.watch-thread-link', thread.OP.nodes.post
      watched = ThreadWatcher.db.get {boardID: thread.board.ID, threadID}
      helper = if watched then ['addClass', 'Unwatch'] else ['rmClass', 'Watch']
@@ -42,7 +42,7 @@ Unread =
  ready: ->
    $.off d, '4chanXInitFinished', Unread.ready
    posts = []
    posts.push post for ID, post of Unread.thread.posts when post.isReply
    Unread.thread.posts.forEach (post) -> posts.push post if post.isReply
    Unread.addPosts posts unless Conf['Quote Threading']
    QuoteThreading.force() if Conf['Quote Threading']
    Unread.scroll() if Conf['Scroll to Last Read Post']
@@ -52,14 +52,15 @@ Unread =
    return if (hash = location.hash.match /\d+/) and hash[0] of Unread.thread.posts
    if post = Unread.posts.first
      # Scroll to a non-hidden, non-OP post that's before the first unread post.
      while root = $.x 'preceding-sibling::div[contains(@class,"replyContainer")][1]', post.nodes.root
      while root = $.x 'preceding-sibling::div[contains(@class,"replyContainer")][1]', post.data.nodes.root
        break unless (post = Get.postFromRoot root).isHidden
      return unless root
      down = true
    else
      # Scroll to the last read post.
      posts = Object.keys Unread.thread.posts
      {root} = Unread.thread.posts[posts[posts.length - 1]].nodes
      {posts} = Unread.thread
      {keys} = posts
      {root} = posts[keys[keys.length - 1]].nodes

    # Scroll to the target unless we scrolled past it.
    Header.scrollTo root, down if Header.getBottomOf(root) < 0
@@ -94,12 +95,11 @@ Unread =
      Unread.addPostQuotingYou post
    if Conf['Unread Line']
      # Force line on visible threads if there were no unread posts previously.
      Unread.setLine Unread.posts.first in posts
      Unread.setLine Unread.posts.first?.data in posts
    Unread.read()
    Unread.update()

  addPostQuotingYou: (post) ->
    return unless QR.db
    for quotelink in post.nodes.quotelinks when QR.db.get Get.postDataFromLink quotelink
      Unread.postsQuotingYou.push post
      Unread.openNotification post
@@ -130,11 +130,12 @@ Unread =

  readSinglePost: (post) ->
    {ID} = post
    return unless Unread.posts[ID]
    if post is Unread.posts.first
    {posts} = Unread
    return unless posts[ID]
    if post is posts.first
      Unread.lastReadPost = ID
      Unread.saveLastReadPost()
    Unread.posts.rm ID
    posts.rm ID
    if (i = Unread.postsQuotingYou.indexOf post) isnt -1
      Unread.postsQuotingYou.splice i, 1
    Unread.update()
@@ -150,12 +151,16 @@ Unread =

    {posts} = Unread
    while post = posts.first
      break unless Header.getBottomOf(post.nodes.root) > -1 # post is not completely read
      {ID} = post
      break unless Header.getBottomOf(post.data.nodes.root) > -1 # post is not completely read
      {ID, data} = post
      posts.rm ID

      if Conf['Mark Quotes of You'] and post.info.yours
        QuoteYou.lastRead = post.nodes.root
      if Conf['Mark Quotes of You'] and QR.db.get {
        boardID: data.board.ID
        threadID: data.thread.ID
        postID: ID
      }
        QuoteYou.lastRead = data.nodes.root

    return unless ID
@@ -174,8 +179,8 @@ Unread =
  setLine: (force) ->
    return unless d.hidden or force is true
    return $.rm Unread.hr unless post = Unread.posts.first
    if $.x 'preceding-sibling::div[contains(@class,"replyContainer")]', post.nodes.root # not the first reply
      $.before post.nodes.root, Unread.hr
    if $.x 'preceding-sibling::div[contains(@class,"replyContainer")]', post.data.nodes.root # not the first reply
      $.before post.data.nodes.root, Unread.hr

  update: <% if (type === 'crx') { %>(dontrepeat) <% } %>->
    count = Unread.posts.length
@@ -324,7 +324,7 @@ QR =
    return unless QR.nodes
    list = QR.nodes.thread
    options = [list.firstChild]
    for thread of g.BOARD.threads
    for thread in g.BOARD.threads.keys
      options.push $.el 'option',
        value: thread
        textContent: "Thread No.#{thread}"
@@ -42,15 +42,19 @@ QuoteThreading =
    QuoteThreading.force()

  force: ->
    post.cb true for ID, post of g.posts when post.cb
    return
    g.posts.forEach (post) ->
      post.cb true if post.cb

    if Conf['Unread Count'] and Unread.thread.OP.nodes.root.parentElement.parentElement
      Unread.read()
      Unread.update()

  node: ->
    {posts} = g
    return if @isClone or not QuoteThreading.enabled
    Unread.posts.push @ if Conf['Unread Count']

    return if @thread.OP is @ or !(post = posts[@fullID]) or post.isHidden # Filtered
    Unread.posts.push @ if Conf['Unread Count']
    return if @thread.OP is @ or @isHidden # Filtered

    keys = []
    len = g.BOARD.ID.length + 1
@@ -90,11 +94,11 @@ QuoteThreading =

    return true unless Conf['Unread Count']

    if posts[post.ID]
      posts.after post, @
    if post = posts[post.ID]
      posts.after post, posts[@ID]

    else
      posts.prepend @
      posts.prepend posts[@ID]

    return true

@@ -106,8 +110,10 @@ QuoteThreading =
    thread = $('.thread')
    posts = []
    nodes = []

    g.posts.forEach (post) ->
      posts.push post unless post is post.thread.OP or post.isClone

    posts.push post for ID, post of g.posts when not (post is post.thread.OP or post.isClone)
    posts.sort (a, b) -> a.ID - b.ID

    nodes.push post.nodes.root for post in posts