# Background job: walks the `channels` table oldest-first and re-fetches
# each channel, running at most `max_threads` fetches concurrently.
def refresh_channels(db, logger, max_threads = 1, full_refresh = false)
  max_channel = Channel(Int32).new

  spawn do
    max_threads = max_channel.receive
    active_threads = 0
    active_channel = Channel(Bool).new

    loop do
      db.query("SELECT id FROM channels ORDER BY updated") do |rs|
        rs.each do
          id = rs.read(String)

          # At the limit, block until a worker signals completion
          if active_threads >= max_threads
            if active_channel.receive
              active_threads -= 1
            end
          end

          active_threads += 1
          spawn do
            begin
              channel = fetch_channel(id, db, full_refresh)

              db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.now, channel.author, id)
            rescue ex
              if ex.message == "Deleted or invalid channel"
                db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.now, id)
              end
              logger.write("#{id} : #{ex.message}\n")
            end

            active_channel.send(true)
          end
        end
      end

      sleep 1.minute
    end
  end

  max_channel.send(max_threads)
end
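
# Usage sketch (hypothetical caller; `PG_DB` and `logger` stand in for the
# application's database handle and logger object):
#
# ```
# refresh_channels(PG_DB, logger, max_threads: 4)
# ```
#
# The refresh loop runs in its own fiber, so the call returns right after
# handing over the thread limit via `max_channel`.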

# Keeps each user's `subscriptions_*` materialized view up to date, using
# the same fiber-pool throttle as `refresh_channels`. Views with an outdated
# column layout are dropped and recreated; legacy-named views are renamed.
def refresh_feeds(db, logger, max_threads = 1, use_feed_events = false)
  max_channel = Channel(Int32).new

  # TODO: Instead of Fiber.yield, use proper queuing to prevent overloading DB
  # Spawn a fiber to handle feed events
  if use_feed_events
    spawn do
      PG.connect_listen(PG_URL, "feeds") do |event|
        spawn do
          feed = JSON.parse(event.payload)
          email = feed["email"].as_s
          action = feed["action"].as_s

          view_name = "subscriptions_#{sha256(email)}"

          case action
          when "refresh"
            db.exec("REFRESH MATERIALIZED VIEW #{view_name}")
          end
        end

        Fiber.yield
      end
    end
  end

  spawn do
    max_threads = max_channel.receive
    active_threads = 0
    active_channel = Channel(Bool).new

    loop do
      db.query("SELECT email FROM users") do |rs|
        rs.each do
          email = rs.read(String)
          view_name = "subscriptions_#{sha256(email)}"

          if active_threads >= max_threads
            if active_channel.receive
              active_threads -= 1
            end
          end

          active_threads += 1
          spawn do
            begin
              # Drop outdated views
              column_array = get_column_array(db, view_name)
              ChannelVideo.to_type_tuple.each_with_index do |name, i|
                if name != column_array[i]?
                  logger.write("DROP MATERIALIZED VIEW #{view_name}\n")
                  db.exec("DROP MATERIALIZED VIEW #{view_name}")
                  # Jump to the rescue below, which recreates the view
                  raise "view does not exist"
                end
              end

              db.exec("REFRESH MATERIALIZED VIEW #{view_name}")
            rescue ex
              # Rename old views
              begin
                legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"

                db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
                logger.write("RENAME MATERIALIZED VIEW #{legacy_view_name}\n")
                db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
              rescue ex
                begin
                  # While iterating through, we may have an email stored from a deleted account
                  if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
                    logger.write("CREATE #{view_name}\n")
                    db.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
                    SELECT * FROM channel_videos WHERE \
                    ucid = ANY ((SELECT subscriptions FROM users WHERE email = E'#{email.gsub("'", "\\'")}')::text[]) \
                    ORDER BY published DESC;")
                  end
                rescue ex
                  logger.write("REFRESH #{email} : #{ex.message}\n")
                end
              end
            end

            active_channel.send(true)
          end
        end
      end

      sleep 1.minute
    end
  end

  max_channel.send(max_threads)
end
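
# Feed events arrive as JSON payloads on the Postgres "feeds" channel; the
# listener above expects `email` and `action` keys. A sketch of how a
# producer might emit one (the NOTIFY side is an assumption — only the
# listening side appears in this file):
#
# ```
# payload = {email: "user@example.com", action: "refresh"}.to_json
# PG_DB.exec("NOTIFY feeds, '#{payload.gsub("'", "''")}'")
# ```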

# Renews PubSubHubbub subscriptions for channels whose subscription is
# missing or more than four days old.
def subscribe_to_feeds(db, logger, key, config)
  if config.use_pubsub_feeds
    case config.use_pubsub_feeds
    when Bool
      # Bool#to_unsafe yields 0 for false and 1 for true, so `true`
      # behaves as a single-threaded pool
      max_threads = config.use_pubsub_feeds.as(Bool).to_unsafe
    when Int32
      max_threads = config.use_pubsub_feeds.as(Int32)
    end

    max_channel = Channel(Int32).new

    spawn do
      max_threads = max_channel.receive
      active_threads = 0
      active_channel = Channel(Bool).new

      loop do
        # `query` (rather than `query_all`) yields the result set once,
        # matching the row-iteration pattern used by the other jobs
        db.query("SELECT id FROM channels WHERE CURRENT_TIMESTAMP - subscribed > interval '4 days' OR subscribed IS NULL") do |rs|
          rs.each do
            ucid = rs.read(String)

            if active_threads >= max_threads.as(Int32)
              if active_channel.receive
                active_threads -= 1
              end
            end

            active_threads += 1

            spawn do
              begin
                response = subscribe_pubsub(ucid, key, config)

                if response.status_code >= 400
                  logger.write("#{ucid} : #{response.body}\n")
                end
              rescue ex
                # Errors are swallowed; the channel is retried on the next pass
              end

              active_channel.send(true)
            end
          end
        end

        sleep 1.minute
      end
    end

    max_channel.send(max_threads.as(Int32))
  end
end
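
# `use_pubsub_feeds` doubles as an on/off switch and a pool size: `false`
# disables the job, `true` behaves like 1, and an integer sets the number
# of concurrent subscription fibers directly — e.g. `use_pubsub_feeds: 4`
# in the config (a sketch; the exact config key format is an assumption).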

# Recomputes the ranked "top" video list once a minute and yields it to the
# caller's block. Failures in ranking skip the whole pass; failures fetching
# a single video skip just that video.
def pull_top_videos(config, db)
  loop do
    begin
      top = rank_videos(db, 40)
    rescue ex
      next
    end

    if top.size > 0
      args = arg_array(top) # NOTE: computed but currently unused
    else
      next
    end

    videos = [] of Video

    top.each do |id|
      begin
        videos << get_video(id, db)
      rescue ex
        next
      end
    end

    yield videos

    sleep 1.minute
  end
end
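
# Usage sketch (hypothetical caller; the yielded list would typically be
# stored somewhere request handlers can read it):
#
# ```
# top_videos = [] of Video
# spawn do
#   pull_top_videos(CONFIG, PG_DB) do |videos|
#     top_videos = videos
#   end
# end
# ```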

# Builds the "popular" feed: the most recent video from each of the 40 most
# subscribed channels, newest first.
def pull_popular_videos(db)
  loop do
    subscriptions = db.query_all("SELECT channel FROM \
    (SELECT UNNEST(subscriptions) AS channel FROM users) AS d \
    GROUP BY channel ORDER BY COUNT(channel) DESC LIMIT 40", as: String)

    # DISTINCT ON (ucid) with ORDER BY ucid, published DESC keeps exactly
    # one row per channel: its most recently published video
    videos = db.query_all("SELECT DISTINCT ON (ucid) * FROM \
    channel_videos WHERE ucid IN (#{arg_array(subscriptions)}) \
    ORDER BY ucid, published DESC", subscriptions, as: ChannelVideo).sort_by { |video| video.published }.reverse

    yield videos

    sleep 1.minute
  end
end
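
# Usage sketch (hypothetical caller, mirroring `pull_top_videos` above):
#
# ```
# popular_videos = [] of ChannelVideo
# spawn do
#   pull_popular_videos(PG_DB) do |videos|
#     popular_videos = videos
#   end
# end
# ```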

# Periodically re-fetches the signature decrypt function and yields it to
# the caller's block, picking up upstream player changes.
def update_decrypt_function
  loop do
    begin
      decrypt_function = fetch_decrypt_function
    rescue ex
      # On failure, retry immediately rather than waiting out the sleep
      next
    end

    yield decrypt_function

    sleep 1.minute
  end
end
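
# Usage sketch (hypothetical caller; the fetched function is kept in a
# variable the streaming code closes over):
#
# ```
# decrypt_function = fetch_decrypt_function
# spawn do
#   update_decrypt_function do |function|
#     decrypt_function = function
#   end
# end
# ```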

# Refreshes the per-region proxy list once a minute, yielding each region's
# candidates to the caller.
def find_working_proxies(regions)
  loop do
    regions.each do |region|
      proxies = get_proxies(region).first(20)
      proxies = proxies.map { |proxy| {ip: proxy[:ip], port: proxy[:port]} }
      # proxies = filter_proxies(proxies)

      yield region, proxies
    end

    sleep 1.minute
  end
end
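
# Usage sketch (hypothetical caller; `REGIONS` is an assumed list of region
# codes, and the element type matches the map above):
#
# ```
# proxies = {} of String => Array({ip: String, port: Int32})
# spawn do
#   find_working_proxies(REGIONS) do |region, region_proxies|
#     proxies[region] = region_proxies
#   end
# end
# ```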