2018-03-07 23:58:33 +00:00
|
|
|
# Generates the boilerplate shared by DB-backed model classes from a single
# mapping literal: a positional constructor, a `to_a` serializer, and the
# DB.mapping declaration — all driven by the same keys, so field order stays
# consistent between the three.
macro add_mapping(mapping)
  # Positional initializer: one `@ivar` parameter per mapping key, in
  # mapping-key order.
  def initialize({{*mapping.keys.map { |id| "@#{id}".id }}})
  end

  # Returns the instance's fields as an array in mapping-key order.
  # NOTE: callers (e.g. get_video, fetch_channel) rely on this order matching
  # the table's column order when building INSERT/UPDATE argument lists.
  def to_a
    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
  end

  # Database (de)serialization from the same mapping.
  DB.mapping({{mapping}})
end
|
|
|
|
|
|
|
|
# Renders the named view inside the shared site layout.
# `filename` is spliced into the view path at compile time, so
# `templated("popular")` expands to
#   render "src/invidious/views/popular.ecr", "src/invidious/views/layout.ecr"
# Fix: the interpolation target was garbled as `(unknown)`; it must be the
# macro's `filename` argument for the expansion to reference anything.
macro templated(filename)
  render "src/invidious/views/#{{filename}}.ecr", "src/invidious/views/layout.ecr"
end
|
|
|
|
|
2018-06-01 22:26:00 +00:00
|
|
|
# Renders the named view WITHOUT the site layout (e.g. for partials).
# Counterpart of `templated`; `filename` is spliced into the path at
# compile time.
# Fix: the interpolation target was garbled as `(unknown)`; restored to the
# macro's `filename` argument.
macro rendered(filename)
  render "src/invidious/views/#{{filename}}.ecr"
end
|
|
|
|
|
2018-07-16 16:24:24 +00:00
|
|
|
# Fallback preferences used when a user has none stored or their stored JSON
# fails to parse (see User::PreferencesConverter). Built by round-tripping a
# hash through JSON so it goes through the same Preferences.from_json path as
# real user data. Keys must stay in sync with the Preferences JSON.mapping.
DEFAULT_USER_PREFERENCES = Preferences.from_json({
  "video_loop" => false,
  "autoplay" => false,
  "speed" => 1.0,
  "quality" => "hd720",
  "volume" => 100,
  "dark_mode" => false,
  "max_results" => 40,
  "sort" => "published",
  "latest_only" => false,
}.to_json)
|
|
|
|
|
2018-03-09 18:42:23 +00:00
|
|
|
# Application configuration loaded from YAML (config file).
class Config
  YAML.mapping({
    crawl_threads: Int32,   # number of background video-crawl workers
    channel_threads: Int32, # number of channel-refresh workers
    video_threads: Int32,   # number of video-refresh workers
    # PostgreSQL connection parameters.
    db: NamedTuple(
      user: String,
      password: String,
      host: String,
      port: Int32,
      dbname: String,
    ),
    dl_api_key: String?, # optional DetectLanguage API key (see rank_videos)
    https_only: Bool?,   # optional; nil when unset in the config file
    hmac_key: String?,   # optional key for signed tokens
  })
end
|
|
|
|
|
2018-04-16 03:56:58 +00:00
|
|
|
# Kemal middleware that gzip/deflate-compresses responses based on the
# client's Accept-Encoding, except for excluded routes (/videoplayback is
# streamed and must not be wrapped).
class FilteredCompressHandler < Kemal::Handler
  exclude ["/videoplayback"]

  def call(env)
    # Skip compression entirely for excluded routes.
    return call_next env if exclude_match? env

    # Compile-time switch: builds without zlib get a pass-through handler.
    {% if flag?(:without_zlib) %}
      call_next env
    {% else %}
      request_headers = env.request.headers

      # Prefer gzip, fall back to deflate; otherwise leave the body as-is.
      # sync_close: true ensures the compressing writer is flushed/closed
      # together with the response output.
      if request_headers.includes_word?("Accept-Encoding", "gzip")
        env.response.headers["Content-Encoding"] = "gzip"
        env.response.output = Gzip::Writer.new(env.response.output, sync_close: true)
      elsif request_headers.includes_word?("Accept-Encoding", "deflate")
        env.response.headers["Content-Encoding"] = "deflate"
        env.response.output = Flate::Writer.new(env.response.output, sync_close: true)
      end

      call_next env
    {% end %}
  end
end
|
|
|
|
|
2018-01-28 02:09:27 +00:00
|
|
|
# DB-backed record for a scraped YouTube video (see fetch_video / get_video).
class Video
  # Deserializes the `info` column (stored as a URL-encoded string) back
  # into HTTP::Params when reading rows.
  module HTTPParamConverter
    def self.from_rs(rs)
      HTTP::Params.parse(rs.read(String))
    end
  end

  # Field order here defines both the positional constructor and to_a /
  # column order (see add_mapping).
  add_mapping({
    id: String,
    info: {
      type: HTTP::Params,
      default: HTTP::Params.parse(""),
      converter: Video::HTTPParamConverter,
    },
    updated: Time,
    title: String,
    views: Int64,
    likes: Int32,
    dislikes: Int32,
    wilson_score: Float64,
    published: Time,
    description: String,
    language: String?, # filled in lazily by rank_videos via DetectLanguage
    author: String,
    ucid: String,
  })
end
|
|
|
|
|
2018-03-25 03:38:35 +00:00
|
|
|
# DB-backed record for a tracked channel (id, display name, last refresh).
class InvidiousChannel
  add_mapping({
    id: String,     # channel UCID
    author: String, # channel display name
    updated: Time,  # last time the channel was refreshed (see get_channel)
  })
end
|
|
|
|
|
|
|
|
# Lightweight record for a video as listed on a channel page or RSS feed.
# Field order matters: add_mapping generates the positional initializer and
# to_a in this order (relied on by fetch_channel's INSERT statements).
class ChannelVideo
  add_mapping({
    id: String,
    title: String,
    published: Time,
    updated: Time,
    ucid: String,
    author: String,
  })
end
|
|
|
|
|
2018-03-30 02:41:05 +00:00
|
|
|
# DB-backed record for a registered user.
class User
  # Deserializes the stored preferences JSON column; falls back to
  # DEFAULT_USER_PREFERENCES on any parse error so a corrupt row never
  # breaks login.
  module PreferencesConverter
    def self.from_rs(rs)
      begin
        Preferences.from_json(rs.read(String))
      rescue ex
        DEFAULT_USER_PREFERENCES
      end
    end
  end

  # NOTE: `preferences` is index 5 of to_a — get_user relies on that index
  # when JSON-encoding the field before INSERT.
  add_mapping({
    id: String,
    updated: Time,
    notifications: Array(String),
    subscriptions: Array(String),
    email: String,
    preferences: {
      type: Preferences,
      default: DEFAULT_USER_PREFERENCES,
      converter: PreferencesConverter,
    },
    password: String?, # nil for Google-authenticated accounts — TODO confirm
    token: String,
  })
end
|
|
|
|
|
|
|
|
# Per-user player/UI settings, stored as JSON on the users table.
# Keys must stay in sync with DEFAULT_USER_PREFERENCES.
class Preferences
  JSON.mapping({
    video_loop: Bool,
    autoplay: Bool,
    speed: Float32,
    quality: String,
    volume: Int32,
    dark_mode: Bool,
    max_results: Int32, # videos shown per feed page
    sort: String,
    latest_only: Bool,
  })
end
|
|
|
|
|
2018-04-21 23:04:01 +00:00
|
|
|
# Generic Reddit API envelope: every payload is a "thing" with a `kind` tag
# and a `data` body whose concrete type callers resolve via `.as(...)`.
class RedditThing
  JSON.mapping({
    kind: String,
    data: RedditComment | RedditLink | RedditMore | RedditListing,
  })
end
|
|
|
|
|
2018-04-21 23:04:01 +00:00
|
|
|
# A single Reddit comment (subset of fields used by template_comments).
class RedditComment
  JSON.mapping({
    author: String,
    body_html: String,
    # Reddit sends "" (empty string) instead of an object when there are no
    # replies — hence the union with String.
    replies: RedditThing | String,
    score: Int32,
    depth: Int32,
  })
end
|
|
|
|
|
2018-04-21 23:04:01 +00:00
|
|
|
# A Reddit submission/link (subset of fields used by get_reddit_comments).
class RedditLink
  JSON.mapping({
    author: String,
    score: Int32,
    subreddit: String,
    num_comments: Int32,
    id: String,
    permalink: String,
    title: String,
  })
end
|
|
|
|
|
2018-04-21 23:04:01 +00:00
|
|
|
# Reddit "more comments" placeholder node.
class RedditMore
  JSON.mapping({
    children: Array(String),
    count: Int32,
    depth: Int32,
  })
end
|
|
|
|
|
|
|
|
# Reddit listing container: an ordered page of child things.
class RedditListing
  JSON.mapping({
    children: Array(RedditThing),
    modhash: String,
  })
end
|
|
|
|
|
2018-01-21 00:19:12 +00:00
|
|
|
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
|
|
|
|
# Lower bound of the Wilson score confidence interval for a Bernoulli
# parameter: given `pos` positive ratings out of `n` total, returns a
# conservative estimate of the "true" positive fraction. Used to rank
# videos without favoring items with very few ratings.
#
# Returns 0.0 when there are no ratings at all.
def ci_lower_bound(pos, n)
  return 0.0 if n == 0

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0*pos/n

  numerator = phat + z*z/(2*n) - z * Math.sqrt((phat*(1 - phat) + z*z/(4*n))/n)
  numerator/(1 + z*z/n)
end
|
|
|
|
|
|
|
|
# Human-readable rendering of a time span for request logging: milliseconds
# (2 decimal places) when the span is at least 1 ms, microseconds otherwise.
# `elapsed` must respond to #total_milliseconds (e.g. a Time::Span).
def elapsed_text(elapsed)
  millis = elapsed.total_milliseconds

  if millis >= 1
    "#{millis.round(2)}ms"
  else
    "#{(millis * 1000).round(2)}µs"
  end
end
|
|
|
|
|
2018-01-28 02:09:27 +00:00
|
|
|
# Scrapes all metadata for one video from YouTube and returns a Video.
# Fetches the watch page and get_video_info concurrently in two fibers,
# joining on channels. Raises when the video is unavailable (info["reason"])
# or when the publish date cannot be located/parsed.
def fetch_video(id, client)
  info_channel = Channel(HTTP::Params).new
  html_channel = Channel(XML::Node).new

  # Fiber 1: the watch page (likes/dislikes/description/publish date).
  # bpctr in the future bypasses the content warning interstitial;
  # disable_polymer forces the old scrapable markup.
  spawn do
    html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&disable_polymer=1").body
    html = XML.parse_html(html)

    html_channel.send(html)
  end

  # Fiber 2: get_video_info (title/views/author/ucid). If el=detailpage is
  # refused (info has a "reason"), retry once without it before giving up.
  spawn do
    info = client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body
    info = HTTP::Params.parse(info)

    if info["reason"]?
      info = client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body
      info = HTTP::Params.parse(info)
      if info["reason"]?
        raise info["reason"]
      end
    end

    info_channel.send(info)
  end

  # Join both fibers.
  html = html_channel.receive
  info = info_channel.receive

  title = info["title"]
  views = info["view_count"].to_i64
  author = info["author"]
  ucid = info["ucid"]

  # Like/dislike counts come from the page buttons; missing nodes (ratings
  # disabled) fall back to 0.
  likes = html.xpath_node(%q(//button[@title="I like this"]/span))
  likes = likes.try &.content.delete(",").try &.to_i
  likes ||= 0

  dislikes = html.xpath_node(%q(//button[@title="I dislike this"]/span))
  dislikes = dislikes.try &.content.delete(",").try &.to_i
  dislikes ||= 0

  description = html.xpath_node(%q(//p[@id="eow-description"]))
  description = description ? description.to_xml : ""

  wilson_score = ci_lower_bound(likes, likes + dislikes)

  published = html.xpath_node(%q(//strong[contains(@class,"watch-time-text")]))
  if published
    published = published.content
  else
    raise "Could not find date published"
  end

  # The node text is e.g. "Published on Jan 5, 2018" or "Streamed 3 hours
  # ago" — keep only the last three words.
  published = published.split(" ")
  published = published[-3..-1].join(" ")
  if !published.includes?("ago")
    published = Time.parse(published, "%b %-d, %Y", Time::Location.local)
  else
    # Time matches format "20 hours ago", "40 minutes ago"...
    delta = published.split(" ")[0].to_i
    case published
    when .includes? "minute"
      published = Time.now - delta.minutes
    when .includes? "hour"
      published = Time.now - delta.hours
    else
      raise "Could not parse #{published}"
    end
  end

  # Positional order must match Video's add_mapping field order;
  # nil is the (not yet detected) language.
  video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description, nil, author, ucid)

  return video
end
|
|
|
|
|
2018-01-28 02:09:27 +00:00
|
|
|
# Returns the Video for `id`, backed by the videos table as a cache:
# a cached row is returned as-is unless `refresh` is set and the row is
# stale (> 1 hour), in which case it is re-scraped and updated; a missing
# row is scraped and inserted. A failed refresh deletes the stale row.
def get_video(id, client, db, refresh = true)
  if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)
    video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)

    # If record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
    if refresh && Time.now - video.updated > 1.hour
      begin
        video = fetch_video(id, client)
        video_array = video.to_a
        # Skip the id column ($1); placeholders therefore start at $2.
        args = arg_array(video_array[1..-1], 2)

        db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,published,description,language,author,ucid)\
  = (#{args}) WHERE id = $1", video_array)
      rescue ex
        # Scrape failed (deleted/region-locked video): drop the stale row
        # rather than serving it forever.
        db.exec("DELETE FROM videos * WHERE id = $1", id)
      end
    end
  else
    video = fetch_video(id, client)
    video_array = video.to_a
    args = arg_array(video_array)

    db.exec("INSERT INTO videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
  end

  return video
end
|
2018-01-21 23:49:27 +00:00
|
|
|
|
2018-01-28 02:09:27 +00:00
|
|
|
# Scrapes YouTube search results for `query` and yields each result's
# video id to the given block. sp=EgIQAVAU filters results to videos —
# TODO confirm against YouTube's sp parameter encoding.
def search(query, client)
  html = client.get("https://www.youtube.com/results?q=#{query}&sp=EgIQAVAU&disable_polymer=1").body

  html = XML.parse_html(html)

  html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |item|
    root = item.xpath_node(%q(div[contains(@class,"yt-lockup-video")]/div))
    if root
      link = root.xpath_node(%q(div[contains(@class,"yt-lockup-thumbnail")]/a/@href))
      if link
        # href is "/watch?v=<id>"; take the part after "=".
        yield link.content.split("=")[1]
      end
    end
  end
end
|
2018-02-03 20:41:59 +00:00
|
|
|
|
2018-02-22 19:01:37 +00:00
|
|
|
# Swaps element 0 of `a` with the element at index `b % a.size`, mutating
# `a` in place and returning it. Mirrors one of the YouTube player's
# signature-scrambling primitives (see decrypt_signature).
def splice(a, b)
  index = b % a.size
  a[0], a[index] = a[index], a[0]
  return a
end
|
|
|
|
|
2018-07-17 15:53:17 +00:00
|
|
|
# Descrambles a stream signature `a` by replaying the operation list `code`
# produced by update_decrypt_function. Each step is one of:
#   "a" — reverse the characters
#   "b" — drop the first `value` characters
#   "c" — swap char 0 with char (value % length)  (see splice)
def decrypt_signature(a, code)
  chars = a.split("")

  code.each do |step|
    case step[:name]
    when "a"
      chars.reverse!
    when "b"
      chars.delete_at(0..(step[:value] - 1))
    when "c"
      chars = splice(chars, step[:value])
    end
  end

  return chars.join("")
end
|
|
|
|
|
2018-07-17 15:53:17 +00:00
|
|
|
# Reverse-engineers YouTube's current signature scrambler: downloads the
# player JS, locates the decrypt function and its helper object, classifies
# each helper as reverse ("a"), splice-off-front ("b") or swap ("c"), and
# returns the call sequence as {name, value} tuples consumable by
# decrypt_signature. Raises (via not_nil!) if the player layout changes and
# any regex stops matching.
def update_decrypt_function(client)
  # Video with signature
  document = client.get("/watch?v=CvFH_6DNRCY").body
  url = document.match(/src="(?<url>\/yts\/jsbin\/player-.{9}\/en_US\/base.js)"/).not_nil!["url"]
  player = client.get(url).body

  # The function that sets the "signature" param identifies the scrambler.
  function_name = player.match(/\(b\|\|\(b="signature"\),d.set\(b,(?<name>[a-zA-Z]{2})\(c\)\)\)/).not_nil!["name"]
  function_body = player.match(/#{function_name}=function\(a\){(?<body>[^}]+)}/).not_nil!["body"]
  # Drop the leading split("") and trailing join("") statements.
  function_body = function_body.split(";")[1..-2]

  # All remaining statements call methods on one two-letter helper object.
  var_name = function_body[0][0, 2]

  # Map each helper method name to its operation class.
  operations = {} of String => String
  matches = player.delete("\n").match(/var #{var_name}={((?<op1>[a-zA-Z]{2}:[^}]+})),((?<op2>[a-zA-Z]{2}:[^}]+})),((?<op3>[a-zA-Z]{2}:[^}]+}))};/).not_nil!
  3.times do |i|
    operation = matches["op#{i + 1}"]
    op_name = operation[0, 2]

    op_body = operation.match(/\{[^}]+\}/).not_nil![0]
    case op_body
    when "{a.reverse()}"
      operations[op_name] = "a"
    when "{a.splice(0,b)}"
      operations[op_name] = "b"
    else
      # Anything else is assumed to be the swap helper.
      operations[op_name] = "c"
    end
  end

  # Translate each "Xx.Yy(a,N)" call into {name: op_class, value: N}.
  decrypt_function = [] of {name: String, value: Int32}
  function_body.each do |function|
    function = function.lchop(var_name + ".")
    op_name = function[0, 2]

    function = function.lchop(op_name + "(a,")
    value = function.rchop(")").to_i

    decrypt_function << {name: operations[op_name], value: value}
  end

  return decrypt_function
end
|
|
|
|
|
2018-04-28 14:22:06 +00:00
|
|
|
# Returns up to `n` "hot" video ids: Wilson score decayed exponentially by
# age, hottest first. With `filter` set, additionally keeps only videos
# detected (or cached) as English — detection results are written back to
# the videos table. `url` is used to build a fresh client per lookup.
def rank_videos(db, n, filter, url)
  top = [] of {Float64, String}

  db.query("SELECT id, wilson_score, published FROM videos WHERE views > 5000 ORDER BY published DESC LIMIT 1000") do |rs|
    rs.each do
      id = rs.read(String)
      wilson_score = rs.read(Float64)
      published = rs.read(Time)

      # Exponential decay, older videos tend to rank lower
      temperature = wilson_score * Math.exp(-0.000005*((Time.now - published).total_minutes))
      top << {temperature, id}
    end
  end

  top.sort!

  # Make hottest come first
  top.reverse!
  top = top.map { |a, b| b }

  if filter
    language_list = [] of String
    top.each do |id|
      if language_list.size == n
        break
      else
        client = make_client(url)
        begin
          video = get_video(id, client, db)
        rescue ex
          # Unscrapeable video: skip it rather than aborting the ranking.
          next
        end

        if video.language
          language = video.language
        else
          # No cached detection: classify title + description (truncated to
          # 10k chars for the API) and persist the result.
          description = XML.parse(video.description)
          content = [video.title, description.content].join(" ")
          content = content[0, 10000]

          results = DetectLanguage.detect(content)
          language = results[0].language

          db.exec("UPDATE videos SET language = $1 WHERE id = $2", language, id)
        end

        if language == "en"
          language_list << id
        end
      end
    end
    return language_list
  else
    return top[0..n - 1]
  end
end
|
2018-02-06 01:07:49 +00:00
|
|
|
|
2018-03-05 04:25:03 +00:00
|
|
|
# Builds an HTTPS client for `url` with SSLv2/v3 disabled and 10-second
# read/connect timeouts. Callers create one client per task (see
# rank_videos) rather than sharing.
def make_client(url)
  context = OpenSSL::SSL::Context::Client.new
  context.add_options(
    OpenSSL::SSL::Options::ALL |
    OpenSSL::SSL::Options::NO_SSL_V2 |
    OpenSSL::SSL::Options::NO_SSL_V3
  )
  client = HTTP::Client.new(url, context)
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds
  return client
end
|
2018-03-03 21:06:14 +00:00
|
|
|
|
2018-03-04 16:59:03 +00:00
|
|
|
# Finds the highest-scored Reddit submission linking to video `id` and
# fetches its comment tree. Returns {comments, thread}. A 302 from the
# search endpoint is followed directly to a thread; any other non-200
# status raises.
def get_reddit_comments(id, client, headers)
  query = "(url:3D#{id}%20OR%20url:#{id})%20(site:youtube.com%20OR%20site:youtu.be)"
  search_results = client.get("/search.json?q=#{query}", headers)

  if search_results.status_code == 200
    search_results = RedditThing.from_json(search_results.body)

    # Pick the submission with the highest score.
    thread = search_results.data.as(RedditListing).children.sort_by { |child| child.data.as(RedditLink).score }[-1]
    thread = thread.data.as(RedditLink)

    result = client.get("/r/#{thread.subreddit}/comments/#{thread.id}?limit=100&sort=top", headers).body
    result = Array(RedditThing).from_json(result)
  elsif search_results.status_code == 302
    # Search redirected straight to a thread; follow it.
    result = client.get(search_results.headers["Location"], headers).body
    result = Array(RedditThing).from_json(result)

    thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
  else
    raise "Got error code #{search_results.status_code}"
  end

  # Element 0 is the submission listing, element 1 the comment listing.
  comments = result[1].data.as(RedditListing).children
  return comments, thread
end
|
|
|
|
|
|
|
|
# Recursively renders a Reddit comment tree (an Array(RedditThing)) to an
# HTML string. Non-comment nodes (RedditMore etc.) are skipped. Replies are
# rendered depth-first; nested comments get an indent column, top-level
# comments span the full width.
def template_comments(root)
  html = ""
  root.each do |child|
    if child.data.is_a?(RedditComment)
      child = child.data.as(RedditComment)
      author = child.author
      score = child.score
      body_html = HTML.unescape(child.body_html)

      # Render replies first so they can be embedded in this comment's div.
      # `replies` is "" (a String) when there are none — see RedditComment.
      replies_html = ""
      if child.replies.is_a?(RedditThing)
        replies = child.replies.as(RedditThing)
        replies_html = template_comments(replies.data.as(RedditListing).children)
      end

      content = <<-END_HTML
      <p>
      <a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a> #{score} <b>#{author}</b>
      </p>
      <div>
      #{body_html}
      #{replies_html}
      </div>
      END_HTML

      if child.depth > 0
        html += <<-END_HTML
        <div class="pure-g">
        <div class="pure-u-1-24">
        </div>
        <div class="pure-u-23-24">
        #{content}
        </div>
        </div>
        END_HTML
      else
        html += <<-END_HTML
        <div class="pure-g">
        <div class="pure-u-1">
        #{content}
        </div>
        </div>
        END_HTML
      end
    end
  end

  return html
end
|
2018-03-03 21:10:56 +00:00
|
|
|
|
|
|
|
# Formats a number with thousands separators, e.g. 1234567 -> "1,234,567".
# Works on the reversed digit string so a single regex pass can insert a
# comma after every complete group of three digits.
def number_with_separator(number)
  reversed = number.to_s.reverse
  grouped = reversed.gsub(/(\d{3})(?=\d)/, "\\1,")
  grouped.reverse
end
|
2018-03-04 14:54:19 +00:00
|
|
|
|
2018-03-30 00:03:00 +00:00
|
|
|
# Builds a PostgreSQL placeholder list for `array`, e.g. 3 elements with
# start = 1 -> "($1),($2),($3)". `start` offsets the first placeholder
# number (used when earlier $n slots are taken — see get_video's UPDATE).
# An empty array yields "NULL".
def arg_array(array, start = 1)
  return "NULL" if array.size == 0

  (start..array.size + start - 1).map { |i| "($#{i})" }.join(",")
end
|
2018-03-07 04:00:35 +00:00
|
|
|
|
|
|
|
# Appends a small link icon after every <a> in `html` that points to
# YouTube (youtube.com hosts keep path+query; youtu.be links are rewritten
# to local /watch URLs). Returns the augmented HTML string.
def add_alt_links(html)
  alt_links = [] of {Int32, String}

  # This is painful but is likely the only way to accomplish this in Crystal,
  # as Crystigiri and others are not able to insert XML Nodes into a document.
  # The goal here is to use as little regex as possible
  html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
    anchor = XML.parse_html(match[0])
    anchor = anchor.xpath_node("//a").not_nil!
    url = URI.parse(HTML.unescape(anchor["href"]))

    if ["www.youtube.com", "m.youtube.com"].includes?(url.host)
      alt_link = <<-END_HTML
      <a href="#{url.full_path}">
      <i class="fa fa-link" aria-hidden="true"></i>
      </a>
      END_HTML
    elsif url.host == "youtu.be"
      # youtu.be/<id> becomes a local /watch?v=<id> link.
      alt_link = <<-END_HTML
      <a href="/watch?v=#{url.path.try &.lchop("/")}&#{url.query}">
      <i class="fa fa-link" aria-hidden="true"></i>
      </a>
      END_HTML
    else
      alt_link = ""
    end

    # Remember where each anchor ends so the icon can be inserted there.
    alt_links << {match.end.not_nil!, alt_link}
  end

  # Insert from the end of the string backwards so earlier insert positions
  # stay valid as the string grows.
  alt_links.reverse!
  alt_links.each do |position, alt_link|
    html = html.insert(position, alt_link)
  end

  return html
end
|
|
|
|
|
|
|
|
# Rewrites every relative <a href> in `html` to an absolute URL using the
# given scheme and host, and returns the re-serialized markup (the last
# expression, html.to_xml, is the return value).
def fill_links(html, scheme, host)
  html = XML.parse_html(html)

  html.xpath_nodes("//a").each do |match|
    url = URI.parse(match["href"])
    # Reddit links don't have host
    if !url.host && !match["href"].starts_with?("javascript")
      url.scheme = scheme
      url.host = host
      # NOTE(review): assigns a URI where the attribute expects a String —
      # presumably stringified on assignment; confirm `match["href"] = url.to_s`
      # isn't needed.
      match["href"] = url
    end
  end

  html = html.to_xml
end
|
2018-03-16 16:40:29 +00:00
|
|
|
|
|
|
|
# Builds the form-encoded body for Google's GlifWebSignIn login POST:
# fixed flow parameters plus the caller's `f_req` payload, merged over the
# hidden fields scraped from the login form (`login_form` values win only
# where keys don't collide — the fixed `data` entries take precedence).
def login_req(login_form, f_req)
  data = {
    "pstMsg" => "1",
    "checkConnection" => "youtube",
    "checkedDomains" => "youtube",
    "hl" => "en",
    "deviceinfo" => %q([null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]),
    "f.req" => f_req,
    "flowName" => "GlifWebSignIn",
    "flowEntry" => "ServiceLogin",
  }

  data = login_form.merge(data)

  return HTTP::Params.encode(data)
end
|
2018-03-25 03:38:35 +00:00
|
|
|
|
2018-07-05 23:17:27 +00:00
|
|
|
# Returns the InvidiousChannel for `id`, backed by the channels table:
# a cached row older than 10 minutes is re-fetched (when `refresh`) and
# upserted; a missing row is fetched and inserted. `pull_all_videos` is
# forwarded to fetch_channel (full pagination vs RSS-only).
def get_channel(id, client, db, refresh = true, pull_all_videos = true)
  if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
    channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)

    if refresh && Time.now - channel.updated > 10.minutes
      channel = fetch_channel(id, client, db, pull_all_videos)
      channel_array = channel.to_a
      args = arg_array(channel_array)

      # $3 is the `updated` column (third field of InvidiousChannel).
      db.exec("INSERT INTO channels VALUES (#{args}) \
  ON CONFLICT (id) DO UPDATE SET updated = $3", channel_array)
    end
  else
    channel = fetch_channel(id, client, db, pull_all_videos)
    args = arg_array(channel.to_a)
    db.exec("INSERT INTO channels VALUES (#{args})", channel.to_a)
  end

  return channel
end
|
|
|
|
|
2018-07-05 23:17:27 +00:00
|
|
|
# Scrapes a channel's metadata and videos, writing videos and user
# notifications into the DB, and returns a fresh InvidiousChannel record.
# With pull_all_videos = false only the (cheap) RSS feed is processed;
# otherwise every channel page is paginated via produce_videos_url and
# videos deleted upstream are pruned from channel_videos.
def fetch_channel(ucid, client, db, pull_all_videos = true)
  rss = client.get("/feeds/videos.xml?channel_id=#{ucid}").body
  rss = XML.parse_html(rss)

  author = rss.xpath_node(%q(//feed/title))
  if !author
    raise "Deleted or invalid channel"
  end
  author = author.content

  if !pull_all_videos
    # RSS-only path: the feed carries the latest uploads with full metadata.
    rss.xpath_nodes("//feed/entry").each do |entry|
      video_id = entry.xpath_node("videoid").not_nil!.content
      title = entry.xpath_node("title").not_nil!.content
      published = Time.parse(entry.xpath_node("published").not_nil!.content, "%FT%X%z", Time::Location.local)
      updated = Time.parse(entry.xpath_node("updated").not_nil!.content, "%FT%X%z", Time::Location.local)
      author = entry.xpath_node("author/name").not_nil!.content
      ucid = entry.xpath_node("channelid").not_nil!.content

      video = ChannelVideo.new(video_id, title, published, Time.now, ucid, author)

      # Push the video id into subscribers' notification lists (only users
      # not updated since, and not already notified).
      db.exec("UPDATE users SET notifications = notifications || $1 \
  WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, Time.now, ucid)

      video_array = video.to_a
      args = arg_array(video_array)
      # Upsert keyed on id; $2..$6 match ChannelVideo's field order.
      db.exec("INSERT INTO channel_videos VALUES (#{args}) \
  ON CONFLICT (id) DO UPDATE SET title = $2, published = $3, \
  updated = $4, ucid = $5, author = $6", video_array)
    end
  else
    # Full-crawl path: page through the channel's video grid.
    videos = [] of ChannelVideo
    page = 1

    loop do
      url = produce_videos_url(ucid, page)
      response = client.get(url)

      json = JSON.parse(response.body)
      content_html = json["content_html"].as_s
      if content_html.empty?
        # If we don't get anything, move on
        break
      end
      document = XML.parse_html(content_html)

      document.xpath_nodes(%q(//li[contains(@class, "channels-content-item")])).each do |item|
        root = item.xpath_node(%q(div/div/div[@class="yt-lockup-content"]))
        if !root
          raise "could not find root"
        end

        anchor = root.xpath_node(%q(h3[contains(@class,"yt-lockup-title")]/a))
        if !anchor
          raise "could not find anchor"
        end
        title = anchor.content.strip
        video_id = anchor["href"].lchop("/watch?v=")

        published = root.xpath_node(%q(div[@class="yt-lockup-meta"]/ul/li[2]))
        if !published
          # This happens on Youtube red videos, here we just skip them
          next
        end
        # The cell reads like "Streamed 2 weeks ago" / "3 days ago" —
        # convert the relative phrase into an absolute time.
        published = published.content.split(" ")
        span = published[0].to_i
        case published[1]
        when .includes? "second"
          span = span.seconds
        when .includes? "minute"
          span = span.minutes
        when .includes? "hour"
          span = span.hours
        when .includes? "day"
          span = span.days
        when .includes? "week"
          span = span.weeks
        when .includes? "month"
          span = span.months
        when .includes? "year"
          span = span.years
        else
          raise "Unrecognized time: #{published[1]}"
        end

        published = Time.now - span

        videos << ChannelVideo.new(video_id, title, published, Time.now, ucid, author)
      end

      # Fewer than a full page (30 items) means we've reached the end.
      if document.xpath_nodes(%q(//li[contains(@class, "channels-content-item")])).size < 30
        break
      end

      page += 1
    end

    video_ids = [] of String
    videos.each do |video|
      db.exec("UPDATE users SET notifications = notifications || $1 \
  WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, ucid)
      video_ids << video.id

      video_array = video.to_a
      args = arg_array(video_array)
      db.exec("INSERT INTO channel_videos VALUES (#{args}) ON CONFLICT (id) DO NOTHING", video_array)
    end

    # When a video is deleted from a channel, we find and remove it here
    db.exec("DELETE FROM channel_videos * WHERE NOT id = ANY ('{#{video_ids.map { |a| %("#{a}") }.join(",")}}') AND ucid = $1", ucid)
  end

  channel = InvidiousChannel.new(ucid, author, Time.now)

  return channel
end
|
2018-03-30 02:41:05 +00:00
|
|
|
|
2018-07-16 16:24:24 +00:00
|
|
|
# Looks up the user for session id `sid`, lazily refreshing the record from
# YouTube when it is stale.
#
# Known users are returned from the DB as-is unless `refresh` is true and the
# row is more than a minute old, in which case the profile is re-fetched and
# upserted. Unknown sids are fetched and inserted.
def get_user(sid, client, headers, db, refresh = true)
  if db.query_one?("SELECT EXISTS (SELECT true FROM users WHERE id = $1)", sid, as: Bool)
    user = db.query_one("SELECT * FROM users WHERE id = $1", sid, as: User)

    if refresh && Time.now - user.updated > 1.minute
      user = fetch_user(sid, client, headers, db)
      user_array = user.to_a
      # Preferences (column index 5) are stored as JSON text.
      user_array[5] = user_array[5].to_json
      args = arg_array(user_array)

      db.exec("INSERT INTO users VALUES (#{args}) \
      ON CONFLICT (email) DO UPDATE SET id = $1, updated = $2, subscriptions = $4", user_array)
    end
  else
    user = fetch_user(sid, client, headers, db)
    user_array = user.to_a
    user_array[5] = user_array[5].to_json
    # BUGFIX: was `arg_array(user.to_a)`, which built the placeholder list from
    # a fresh, non-JSON-encoded array rather than the `user_array` that is
    # actually bound below. Use the same array, matching the branch above.
    args = arg_array(user_array)

    db.exec("INSERT INTO users VALUES (#{args}) \
    ON CONFLICT (email) DO UPDATE SET id = $1, updated = $2, subscriptions = $4", user_array)
  end

  return user
end
|
|
|
|
|
2018-06-06 18:21:53 +00:00
|
|
|
# Builds a fresh User from the user's YouTube subscription-manager page:
# scrapes subscribed channels (resolving each via get_channel), the account
# email, and generates a new session token.
def fetch_user(sid, client, headers, db)
  feed = client.get("/subscription_manager?disable_polymer=1", headers)
  feed = XML.parse_html(feed.body)

  channels = [] of String
  feed.xpath_nodes(%q(//ul[@id="guide-channels"]/li/a)).each do |channel|
    # Skip YouTube's built-in pseudo-channels.
    if !["Popular on YouTube", "Music", "Sports", "Gaming"].includes? channel["title"]
      # BUGFIX: `lstrip("/channel/")` strips any of the characters
      # '/', 'c', 'h', 'a', 'n', 'e', 'l' from the left and can eat into the
      # id itself; `lchop` removes exactly the "/channel/" prefix (consistent
      # with `ucid.lchop("UC")` used elsewhere in this file).
      channel_id = channel["href"].lchop("/channel/")

      begin
        channel = get_channel(channel_id, client, db, false, false)
        channels << channel.id
      rescue ex
        # Unresolvable channel: drop it rather than failing the whole fetch.
        next
      end
    end
  end

  email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
  if email
    email = email.content.strip
  else
    email = ""
  end

  token = Base64.encode(Random::Secure.random_bytes(32))

  # No password (nil): this account is backed by a Google session.
  user = User.new(sid, Time.now, [] of String, channels, email, DEFAULT_USER_PREFERENCES, nil, token)
  return user
end
|
|
|
|
|
|
|
|
# Creates a brand-new local account: the password is bcrypt-hashed and a
# random session token is generated. The user starts with no notifications,
# no subscriptions, and the default preferences.
def create_user(sid, email, password)
  token = Base64.encode(Random::Secure.random_bytes(32))
  hashed = Crypto::Bcrypt::Password.create(password, cost: 10)

  return User.new(sid, Time.now, [] of String, [] of String, email, DEFAULT_USER_PREFERENCES, hashed.to_s, token)
end
|
2018-05-08 01:38:13 +00:00
|
|
|
|
|
|
|
# Parses a duration string into seconds.
#
# A plain numeric string (e.g. "123.4") is taken as seconds directly;
# otherwise the "1h2m3s500ms"-style components are extracted individually
# (each defaults to 0 when absent). The "m" pattern uses a lookahead so it
# does not swallow the "m" of "ms".
def decode_time(string)
  direct = string.try &.to_f?
  return direct if direct

  hours = /(?<hours>\d+)h/.match(string).try &.["hours"].try &.to_i
  hours ||= 0

  minutes = /(?<minutes>\d+)m(?!s)/.match(string).try &.["minutes"].try &.to_i
  minutes ||= 0

  seconds = /(?<seconds>\d+)s/.match(string).try &.["seconds"].try &.to_i
  seconds ||= 0

  millis = /(?<millis>\d+)ms/.match(string).try &.["millis"].try &.to_i
  millis ||= 0

  return hours * 3600 + minutes * 60 + seconds + millis / 1000
end
|
2018-06-03 00:52:58 +00:00
|
|
|
|
|
|
|
# Builds a "/browse_ajax" continuation URL that pages through a channel's
# uploads playlist starting at video `index`.
#
# The token is built inside-out: each layer is a hand-rolled protobuf-style
# message (tag byte + length byte + payload) that is base64/URL encoded
# before being embedded in the next layer.
def produce_playlist_url(ucid, index)
  # "UCxxxx" -> "VLUUxxxx" — presumably VL = playlist view and UU = the
  # channel's uploads playlist id (TODO confirm against YouTube's scheme).
  ucid = ucid.lchop("UC")
  ucid = "VLUU" + ucid

  # Innermost message: tag 0x08 followed by the varint-encoded start index.
  continuation = write_var_int(index)
  continuation.unshift(0x08_u8)
  slice = continuation.to_unsafe.to_slice(continuation.size)

  # Encode, prefix with "PT:", then wrap as tag 0x7a + length + payload
  # (`continuation.size` is evaluated before the unshift, so it is the
  # payload length).
  continuation = Base64.urlsafe_encode(slice, false)
  continuation = "PT:" + continuation
  continuation = continuation.bytes
  continuation.unshift(0x7a_u8, continuation.size.to_u8)

  # Second layer: encode again and length-prefix.
  slice = continuation.to_unsafe.to_slice(continuation.size)
  continuation = Base64.urlsafe_encode(slice)
  continuation = URI.escape(continuation)
  continuation = continuation.bytes
  continuation.unshift(continuation.size.to_u8)

  # Prepend the length-prefixed playlist id (tag 0x12), then the outer
  # envelope — 0xe2 0xa9 0x85 0xb2 0x02 is presumably the serialized outer
  # field tag (TODO confirm).
  continuation.unshift(ucid.size.to_u8)
  continuation = ucid.bytes + continuation
  continuation.unshift(0x12.to_u8, ucid.size.to_u8)
  continuation.unshift(0xe2_u8, 0xa9_u8, 0x85_u8, 0xb2_u8, 2_u8, continuation.size.to_u8)

  # Final encode of the full envelope for use as a query parameter.
  slice = continuation.to_unsafe.to_slice(continuation.size)
  continuation = Base64.urlsafe_encode(slice)
  continuation = URI.escape(continuation)

  url = "/browse_ajax?action_continuation=1&continuation=#{continuation}"

  return url
end
|
|
|
|
|
2018-06-06 18:21:53 +00:00
|
|
|
# Builds a "/browse_ajax" continuation URL for page `page` of a channel's
# "videos" tab, using the same hand-rolled protobuf-style token format as
# produce_playlist_url but assembled as raw strings.
def produce_videos_url(ucid, page)
  page = "#{page}"

  # Inner metadata blob: fixed flag bytes, then tag 0x7a with the
  # length-prefixed page number (as a string), then a trailing "\xb8\x01\x00".
  meta = "\x12\x06videos \x00\x30\x01\x38\x01\x60\x01\x6a\x00\x7a"
  meta += page.size.to_u8.unsafe_chr
  meta += page
  meta += "\xb8\x01\x00"

  meta = Base64.urlsafe_encode(meta)
  meta = URI.escape(meta)

  # Outer message: tag 0x12 = length-prefixed channel id, tag 0x1a =
  # length-prefixed encoded metadata.
  continuation = "\x12"
  continuation += ucid.size.to_u8.unsafe_chr
  continuation += ucid
  continuation += "\x1a"
  continuation += meta.size.to_u8.unsafe_chr
  continuation += meta

  # Length-prefix the whole message and prepend the envelope tag bytes
  # (same "\xe2\xa9\x85\xb2\x02" prefix used by produce_playlist_url).
  continuation = continuation.size.to_u8.unsafe_chr + continuation
  continuation = "\xe2\xa9\x85\xb2\x02" + continuation

  continuation = Base64.urlsafe_encode(continuation)
  continuation = URI.escape(continuation)

  url = "/browse_ajax?continuation=#{continuation}"

  return url
end
|
|
|
|
|
2018-06-03 00:52:58 +00:00
|
|
|
# Decodes a protobuf-style base-128 varint from `bytes`.
#
# Each byte contributes its low 7 bits, least-significant group first; the
# high bit marks a continuation. Raises "VarInt is too big" past 5 bytes
# (more than fits a 32-bit value).
def read_var_int(bytes)
  num_read = 0
  result = 0

  read = bytes[num_read]

  # BUGFIX: the original branched on `bytes.size == 1` and silently returned
  # 0 for any multi-byte array whose first byte has no continuation bit.
  # Branch on the continuation bit itself: a single terminal byte IS the
  # value, regardless of how many bytes follow it.
  if (read & 0b10000000) == 0
    result = bytes[0].to_i32
  else
    while ((read & 0b10000000) != 0)
      read = bytes[num_read].to_u64
      value = (read & 0b01111111)
      result |= (value << (7 * num_read))

      num_read += 1
      if num_read > 5
        raise "VarInt is too big"
      end
    end
  end

  return result
end
|
|
|
|
|
|
|
|
# Encodes `value` as a protobuf-style base-128 varint: little-endian groups
# of 7 bits, the high bit of each byte flagging a continuation. Returns the
# encoded bytes as an Array(UInt8).
def write_var_int(value : Int)
  bytes = [] of UInt8
  value = value.to_u32

  # Zero still needs one output byte.
  if value == 0
    return [0_u8]
  end

  until value == 0
    chunk = (value & 0b01111111).to_u8
    value = value >> 7

    # More bits remain: set the continuation flag.
    if value != 0
      chunk |= 0b10000000
    end

    bytes << chunk
  end

  return bytes
end
|
2018-07-18 19:26:02 +00:00
|
|
|
|
|
|
|
# Generates a clock-face captcha: picks a random time, renders it as an SVG
# clock, rasterizes it via ImageMagick's `convert`, and returns the PNG as a
# data URI together with an HMAC token binding the expected "H:MM" answer.
def generate_captcha(key)
  # Random minute as a 0-11 tick: *30 gives the hand angle in degrees,
  # *5 the displayed minute value (so minutes are multiples of 5).
  minute = Random::Secure.rand(12)
  minute_angle = minute * 30
  minute = minute * 5

  hour = Random::Secure.rand(12)
  # The hour hand advances fractionally with the minutes.
  hour_angle = hour * 30 + minute_angle.to_f / 12
  if hour == 0
    hour = 12
  end

  # Clock face: 12 hour dots, a center pivot, and the two hands rotated by
  # the angles computed above. (Runtime string — do not edit for style.)
  clock_svg = <<-END_SVG
  <svg viewBox="0 0 100 100" width="200px">
  <circle cx="50" cy="50" r="45" fill="#eee" stroke="black" stroke-width="2"></circle>

  <circle id="hour1" cx="69" cy="17.091" r="2" fill="black"></circle>
  <circle id="hour2" cx="82.909" cy="31" r="2" fill="black"></circle>
  <circle id="hour3" cx="88" cy="50" r="2" fill="black"></circle>

  <circle id="hour4" cx="82.909" cy="69" r="2" fill="black"></circle>
  <circle id="hour5" cx="69" cy="82.909" r="2" fill="black"></circle>
  <circle id="hour6" cx="50" cy="88" r="2" fill="black"></circle>

  <circle id="hour7" cx="31" cy="82.909" r="2" fill="black"></circle>
  <circle id="hour8" cx="17.091" cy="69" r="2" fill="black"></circle>
  <circle id="hour9" cx="12" cy="50" r="2" fill="black"></circle>

  <circle id="hour10" cx="17.091" cy="31" r="2" fill="black"></circle>
  <circle id="hour11" cx="31" cy="17.091" r="2" fill="black"></circle>
  <circle id="hour12" cx="50" cy="12" r="2" fill="black"></circle>

  <circle cx="50" cy="50" r="3" fill="black"></circle>
  <line id="minute" transform="rotate(#{minute_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="16" fill="black" stroke="black" stroke-width="2"></line>
  <line id="hour" transform="rotate(#{hour_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="24" fill="black" stroke="black" stroke-width="2"></line>
  </svg>
  END_SVG

  # Pipe the SVG through ImageMagick and capture the PNG from stdout.
  # NOTE(review): if `convert` fails, `challenge` is left as "" — the caller
  # presumably tolerates an empty image; verify.
  challenge = ""
  convert = Process.run(%(convert -density 1200 -resize 400x400 -background none svg:- png:-), shell: true, input: IO::Memory.new(clock_svg), output: Process::Redirect::Pipe) do |proc|
    challenge = proc.output.gets_to_end
    challenge = Base64.encode(challenge)
    challenge = "data:image/png; base64, #{challenge}"
  end

  # The token is an HMAC over the expected answer, so the server can verify
  # a submitted answer without storing captcha state.
  answer = "#{hour}:#{minute.to_s.rjust(2, '0')}"
  token = OpenSSL::HMAC.digest(:sha256, key, answer)
  token = Base64.encode(token)

  return {challenge: challenge, token: token}
end
|