# frozen_string_literal: true

# Read model for an account's cached timeline: status IDs are kept in a Redis
# sorted set and hydrated from the database on read
class Feed
  def initialize(type, account)
    @type = type
    @account = account
  end

  # Returns up to `limit` statuses for this feed, newest first; max_id and
  # since_id act as exclusive upper/lower bounds on status ID
  def get(limit, max_id = nil, since_id = nil)
    max_id = '+inf' if max_id.blank?
    since_id = '-inf' if since_id.blank?
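    # Pull the requested page of status IDs from the feed's sorted set in Redis,
    # newest first; the returned scores are the status IDs used for hydration below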
    unhydrated = redis.zrevrangebyscore(key, "(#{max_id}", "(#{since_id}", limit: [0, limit], with_scores: true).map(&:last).map(&:to_i)

    # If we're after the most recent items and none are there, we need to precompute the feed
    if unhydrated.empty? && max_id == '+inf' && since_id == '-inf'
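      # Kick off an asynchronous rebuild of the cached feed and, in the meantime,
      # serve this page straight from the database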
      RegenerationWorker.perform_async(@account.id, @type)
      @statuses = Status.send("as_#{@type}_timeline", @account).paginate_by_max_id(limit, nil, nil)
    else
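      # Hydrate the IDs into Status records, preserving the Redis ordering and
      # dropping any IDs that no longer have a matching record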
      status_map = Status.where(id: unhydrated).with_includes.with_counters.map { |status| [status.id, status] }.to_h
      @statuses = unhydrated.map { |id| status_map[id] }.compact
    end

    @statuses
  end

  private

  # Key of the Redis sorted set backing this feed
  def key
    FeedManager.instance.key(@type, @account.id)
  end

  def redis
    Redis.current
  end
end
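
# Example usage (a sketch; assumes `account` is an Account record and that a
# matching Status.as_home_timeline scope exists for the :home feed type):
#
#   feed = Feed.new(:home, account)
#   page = feed.get(20)                 # 20 newest statuses in the feed
#   older = feed.get(20, page.last.id)  # next page, older than the last one shown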